1 /*******************************************************************************
2 ################################################################################
3 # Copyright (c) [2017-2019] [Radisys] #
5 # Licensed under the Apache License, Version 2.0 (the "License"); #
6 # you may not use this file except in compliance with the License. #
7 # You may obtain a copy of the License at #
9 # http://www.apache.org/licenses/LICENSE-2.0 #
11 # Unless required by applicable law or agreed to in writing, software #
12 # distributed under the License is distributed on an "AS IS" BASIS, #
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
14 # See the License for the specific language governing permissions and #
15 # limitations under the License. #
16 ################################################################################
17 *******************************************************************************/
19 /************************************************************************
25     Desc:   C source code for Entry point functions
29 **********************************************************************/
31 /** @file rg_sch_cmn.c
32 @brief This file implements the scheduler's main access to the MAC layer code. */
35 static const char* RLOG_MODULE_NAME="MAC";
36 static int RLOG_FILE_ID=187;
37 static int RLOG_MODULE_ID=4096;
39 /* header include files -- defines (.h) */
40 #include "envopt.h" /* environment options */
41 #include "envdep.h" /* environment dependent */
42 #include "envind.h" /* environment independent */
43 #include "gen.h" /* general layer */
44 #include "ssi.h" /* system service interface */
45 #include "cm_hash.h" /* common hash list */
46 #include "cm_llist.h" /* common linked list library */
47 #include "cm_err.h" /* common error */
48 #include "cm_lte.h" /* common LTE */
55 #include "rg_sch_err.h"
56 #include "rg_sch_inf.h"
58 #include "rg_sch_cmn.h"
59 #include "rl_interface.h"
60 #include "rl_common.h"
62 /* header/extern include files (.x) */
63 #include "gen.x" /* general layer typedefs */
64 #include "ssi.x" /* system services typedefs */
65 #include "cm5.x" /* common timers */
66 #include "cm_hash.x" /* common hash list */
67 #include "cm_lib.x" /* common library */
68 #include "cm_llist.x" /* common linked list */
69 #include "cm_mblk.x" /* memory management */
70 #include "cm_tkns.x" /* common tokens */
71 #include "cm_lte.x" /* common tokens */
72 #include "tfu.x" /* TFU types */
73 #include "lrg.x" /* layer management typedefs for MAC */
74 #include "rgr.x" /* layer management typedefs for MAC */
75 #include "rgm.x" /* layer management typedefs for MAC */
76 #include "rg_sch_inf.x" /* typedefs for Scheduler */
77 #include "rg_sch.x" /* typedefs for Scheduler */
78 #include "rg_sch_cmn.x" /* typedefs for Scheduler */
80 #include "lrg.x" /* Stats Structures */
81 #endif /* MAC_SCH_STATS */
84 #endif /* __cplusplus */
87 EXTERN U32 emtcStatsUlTomSrInd;
88 EXTERN U32 emtcStatsUlBsrTmrTxp;
91 #define RG_ITBS_DIFF(_x, _y) ((_x) > (_y) ? (_x) - (_y) : (_y) - (_x))
92 EXTERN Void rgSCHSc1UlInit ARGS((RgUlSchdApis *apis));
93 #ifdef RG_PHASE2_SCHED
94 EXTERN Void rgSCHRrUlInit ARGS((RgUlSchdApis *apis));
96 EXTERN Void rgSCHEmtcHqInfoFree ARGS((RgSchCellCb *cell, RgSchDlHqProcCb *hqP));
97 EXTERN Void rgSCHEmtcRrUlInit ARGS((RgUlSchdApis *apis));
98 EXTERN Void rgSCHEmtcCmnDlInit ARGS((Void));
99 EXTERN Void rgSCHEmtcCmnUlInit ARGS((Void));
100 EXTERN Void rgSCHEmtcCmnUeNbReset ARGS((RgSchUeCb *ueCb));
101 EXTERN RgSchCmnCqiToTbs *rgSchEmtcCmnCqiToTbs[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_CFI];
103 EXTERN Void rgSCHMaxciUlInit ARGS((RgUlSchdApis *apis));
104 EXTERN Void rgSCHPfsUlInit ARGS((RgUlSchdApis *apis));
106 EXTERN Void rgSCHSc1DlInit ARGS((RgDlSchdApis *apis));
107 #ifdef RG_PHASE2_SCHED
108 EXTERN Void rgSCHRrDlInit ARGS((RgDlSchdApis *apis));
110 EXTERN Void rgSCHEmtcRrDlInit ARGS((RgDlEmtcSchdApis *apis));
112 EXTERN Void rgSCHMaxciDlInit ARGS((RgDlSchdApis *apis));
113 EXTERN Void rgSCHPfsDlInit ARGS((RgDlSchdApis *apis));
115 EXTERN Void rgSCHDlfsInit ARGS((RgDlfsSchdApis *apis));
119 EXTERN Void rgSCHCmnGetCqiEmtcDciFrmt2AggrLvl ARGS((RgSchCellCb *cell));
120 EXTERN Void rgSCHCmnGetEmtcDciFrmtSizes ARGS((RgSchCellCb *cell));
121 EXTERN Void rgSCHEmtcRrUlProcRmvFrmRetx ARGS((RgSchCellCb *cell, RgSchUlHqProcCb *proc));
122 EXTERN S16 rgSCHCmnPrecompEmtcMsg3Vars
124 RgSchCmnUlCell *cellUl,
130 PUBLIC Void rgSCHEmtcCmnUeCcchSduDel
135 EXTERN Void rgSCHEmtcRmvFrmTaLst
137 RgSchCmnDlCell *cellDl,
140 EXTERN Void rgSCHEmtcInitTaLst
142 RgSchCmnDlCell *cellDl
144 EXTERN Void rgSCHEmtcAddToTaLst
146 RgSchCmnDlCell *cellDl,
153 PRIVATE Void rgSCHDlSiSched ARGS((RgSchCellCb *cell,
154 RgSchCmnDlRbAllocInfo *allocInfo,
155 RgInfSfAlloc *subfrmAlloc));
156 PRIVATE Void rgSCHChkNUpdSiCfg ARGS((RgSchCellCb *cell));
157 PRIVATE Void rgSCHSelectSi ARGS((RgSchCellCb *cell));
158 #endif /*RGR_SI_SCH*/
159 /* LTE_ADV_FLAG_REMOVED_START */
162 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
170 PRIVATE S16 rgSCHCmnBuildRntpInfo (
178 PRIVATE Void rgSCHCmnNonDlfsType0Alloc
182 RgSchDlRbAlloc *allocInfo,
185 PRIVATE U8 rgSchCmnUlRvIdxToIMcsTbl[4] = {32, 30, 31, 29};
186 PRIVATE Void rgSCHCmnUlNonadapRetx ARGS((
187 RgSchCmnUlCell *cellUl,
191 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs ARGS((
197 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi ARGS((
208 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1 ARGS((
210 RgSchDlRbAlloc *rbAllocInfo,
211 RgSchDlHqProcCb *hqP,
215 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A ARGS((
217 RgSchDlRbAlloc *rbAllocInfo,
218 RgSchDlHqProcCb *hqP,
222 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B ARGS((
224 RgSchDlRbAlloc *rbAllocInfo,
225 RgSchDlHqProcCb *hqP,
229 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2 ARGS((
231 RgSchDlRbAlloc *rbAllocInfo,
232 RgSchDlHqProcCb *hqP,
236 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A ARGS((
238 RgSchDlRbAlloc *rbAllocInfo,
239 RgSchDlHqProcCb *hqP,
246 PUBLIC Void rgSCHCmnDlSpsSch
250 /* LTE_ADV_FLAG_REMOVED_END */
252 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc ARGS((
254 RgSchCmnDlRbAllocInfo *allocInfo
256 PRIVATE Void rgSCHBcchPcchDlRbAlloc ARGS((
258 RgSchCmnDlRbAllocInfo *allocInfo
260 PRIVATE Void rgSCHCmnDlBcchPcchAlloc ARGS((
264 PRIVATE Void rgSCHCmnDlCqiOnPucchInd ARGS ((
267 TfuDlCqiPucch *pucchCqi,
268 RgrUeCqiRept *ueCqiRept,
270 Bool *is2ndCwCqiAvail
272 PRIVATE Void rgSCHCmnDlCqiOnPuschInd ARGS ((
275 TfuDlCqiPusch *puschCqi,
276 RgrUeCqiRept *ueCqiRept,
278 Bool *is2ndCwCqiAvail
281 PRIVATE Void rgSCHCmnDlCqiOnPucchInd ARGS ((
284 TfuDlCqiPucch *pucchCqi
286 PRIVATE Void rgSCHCmnDlCqiOnPuschInd ARGS ((
289 TfuDlCqiPusch *puschCqi
292 /* ccpu00117452 - MOD - Changed macro name from
293 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
295 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept ARGS((
298 RgrUeCqiRept *ueCqiRept));
299 #endif /* End of RGR_CQI_REPT */
300 /* Fix: syed align multiple UEs to refresh at same time */
301 PRIVATE Void rgSCHCmnGetRefreshPer ARGS((
305 PRIVATE S16 rgSCHCmnApplyUeRefresh ARGS((
309 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa ARGS
314 PRIVATE Void rgSCHCheckAndSetTxScheme ARGS
322 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz ARGS
332 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw ARGS
348 PRIVATE Void rgSCHCmnInitRbAlloc ARGS
354 #endif /* __cplusplus */
358 PUBLIC RgSchdApis rgSchCmnApis;
359 PRIVATE RgUlSchdApis rgSchUlSchdTbl[RGSCH_NUM_SCHEDULERS];
360 PRIVATE RgDlSchdApis rgSchDlSchdTbl[RGSCH_NUM_SCHEDULERS];
362 PRIVATE RgUlSchdApis rgSchEmtcUlSchdTbl[RGSCH_NUM_EMTC_SCHEDULERS];
363 PRIVATE RgDlEmtcSchdApis rgSchEmtcDlSchdTbl[RGSCH_NUM_EMTC_SCHEDULERS];
365 #ifdef RG_PHASE2_SCHED
366 PRIVATE RgDlfsSchdApis rgSchDlfsSchdTbl[RGSCH_NUM_DLFS_SCHEDULERS];
368 PRIVATE RgUlSchdInits rgSchUlSchdInits = RGSCH_ULSCHED_INITS;
369 PRIVATE RgDlSchdInits rgSchDlSchdInits = RGSCH_DLSCHED_INITS;
371 PRIVATE RgEmtcUlSchdInits rgSchEmtcUlSchdInits = RGSCH_EMTC_ULSCHED_INITS;
372 PRIVATE RgEmtcDlSchdInits rgSchEmtcDlSchdInits = RGSCH_EMTC_DLSCHED_INITS;
374 #if (defined (RG_PHASE2_SCHED) && defined (TFU_UPGRADE))
375 PRIVATE RgDlfsSchdInits rgSchDlfsSchdInits = RGSCH_DLFSSCHED_INITS;
378 typedef Void (*RgSchCmnDlAllocRbFunc) ARGS((RgSchCellCb *cell, RgSchDlSf *subFrm,
379 RgSchUeCb *ue, U32 bo, U32 *effBo, RgSchDlHqProcCb *proc,
380 RgSchCmnDlRbAllocInfo *cellWdAllocInfo));
381 typedef U8 (*RgSchCmnDlGetPrecInfFunc) ARGS((RgSchCellCb *cell, RgSchUeCb *ue,
382 U8 numLyrs, Bool bothCwEnbld));
383 PRIVATE Void rgSCHCmnDlAllocTxRbTM1 ARGS((
389 RgSchDlHqProcCb *proc,
390 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
392 PRIVATE Void rgSCHCmnDlAllocTxRbTM2 ARGS((
398 RgSchDlHqProcCb *proc,
399 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
401 PRIVATE Void rgSCHCmnDlAllocTxRbTM3 ARGS((
407 RgSchDlHqProcCb *proc,
408 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
410 PRIVATE Void rgSCHCmnDlAllocTxRbTM4 ARGS((
416 RgSchDlHqProcCb *proc,
417 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
420 PRIVATE Void rgSCHCmnDlAllocTxRbTM5 ARGS((
426 RgSchDlHqProcCb *proc,
427 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
430 PRIVATE Void rgSCHCmnDlAllocTxRbTM6 ARGS((
436 RgSchDlHqProcCb *proc,
437 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
439 PRIVATE Void rgSCHCmnDlAllocTxRbTM7 ARGS((
445 RgSchDlHqProcCb *proc,
446 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
448 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1 ARGS((
454 RgSchDlHqProcCb *proc,
455 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
457 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2 ARGS((
463 RgSchDlHqProcCb *proc,
464 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
466 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3 ARGS((
472 RgSchDlHqProcCb *proc,
473 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
475 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4 ARGS((
481 RgSchDlHqProcCb *proc,
482 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
485 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5 ARGS((
491 RgSchDlHqProcCb *proc,
492 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
495 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6 ARGS((
501 RgSchDlHqProcCb *proc,
502 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
504 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7 ARGS((
510 RgSchDlHqProcCb *proc,
511 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
515 PRIVATE U8 rgSchGetN1ResCount ARGS ((
519 PUBLIC Bool rgSchCmnChkDataOnlyOnPcell
525 PUBLIC U8 rgSCHCmnCalcPcqiBitSz
532 /* Functions specific to each transmission mode for DL Tx RB Allocation*/
533 RgSchCmnDlAllocRbFunc dlAllocTxRbFunc[7] = {rgSCHCmnDlAllocTxRbTM1,
534 rgSCHCmnDlAllocTxRbTM2, rgSCHCmnDlAllocTxRbTM3, rgSCHCmnDlAllocTxRbTM4,
535 NULLP, rgSCHCmnDlAllocTxRbTM6, rgSCHCmnDlAllocTxRbTM7};
537 /* Functions specific to each transmission mode for DL Retx RB Allocation*/
538 RgSchCmnDlAllocRbFunc dlAllocRetxRbFunc[7] = {rgSCHCmnDlAllocRetxRbTM1,
539 rgSCHCmnDlAllocRetxRbTM2, rgSCHCmnDlAllocRetxRbTM3, rgSCHCmnDlAllocRetxRbTM4,
540 NULLP, rgSCHCmnDlAllocRetxRbTM6, rgSCHCmnDlAllocRetxRbTM7};
542 /* Functions specific to each transmission mode for DL Tx RB Allocation*/
543 RgSchCmnDlAllocRbFunc dlAllocTxRbFunc[9] = {rgSCHCmnDlAllocTxRbTM1,
544 rgSCHCmnDlAllocTxRbTM2, rgSCHCmnDlAllocTxRbTM3, rgSCHCmnDlAllocTxRbTM4,
545 NULLP, rgSCHCmnDlAllocTxRbTM6, rgSCHCmnDlAllocTxRbTM7, NULLP, NULLP};
547 /* Functions specific to each transmission mode for DL Retx RB Allocation*/
548 RgSchCmnDlAllocRbFunc dlAllocRetxRbFunc[9] = {rgSCHCmnDlAllocRetxRbTM1,
549 rgSCHCmnDlAllocRetxRbTM2, rgSCHCmnDlAllocRetxRbTM3, rgSCHCmnDlAllocRetxRbTM4,
550 NULLP, rgSCHCmnDlAllocRetxRbTM6, rgSCHCmnDlAllocRetxRbTM7, NULLP, NULLP};
555 PRIVATE U8 rgSCHCmnDlTM3PrecInf2 ARGS((
561 PRIVATE U8 rgSCHCmnDlTM3PrecInf4 ARGS((
567 PRIVATE U8 rgSCHCmnDlTM4PrecInf2 ARGS((
573 PRIVATE U8 rgSCHCmnDlTM4PrecInf4 ARGS((
579 /* Functions specific to each transmission mode for DL RB Allocation*/
580 RgSchCmnDlGetPrecInfFunc getPrecInfoFunc[2][2] = {
581 {rgSCHCmnDlTM3PrecInf2, rgSCHCmnDlTM3PrecInf4},
582 {rgSCHCmnDlTM4PrecInf2, rgSCHCmnDlTM4PrecInf4}
585 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb ARGS((
589 RgSchDlHqTbCb *tbInfo,
594 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb ARGS((
598 RgSchDlHqProcCb *proc,
603 PRIVATE Void rgSCHCmnDlTM3TxTx ARGS((
609 RgSchDlHqProcCb *proc,
610 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
612 PRIVATE Void rgSCHCmnDlTM3TxRetx ARGS((
618 RgSchDlHqProcCb *proc,
619 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
621 PRIVATE Void rgSCHCmnDlTM3RetxRetx ARGS((
627 RgSchDlHqProcCb *proc,
628 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
631 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc ARGS((
637 /* LTE_ADV_FLAG_REMOVED_START */
639 PRIVATE Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc ARGS((
646 /* LTE_ADV_FLAG_REMOVED_END */
647 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx ARGS((
649 RgSchCmnDlRbAllocInfo *allocInfo,
651 RgSchDlHqProcCb *proc
653 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx ARGS((
655 RgSchCmnDlRbAllocInfo *allocInfo,
659 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst ARGS((
660 RgSchCmnDlRbAllocInfo *allocInfo,
662 RgSchDlHqProcCb *proc
664 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb ARGS((
668 RgSchDlHqTbCb *reTxTb,
673 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb ARGS((
677 RgSchDlHqProcCb *proc,
682 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb ARGS((
686 RgSchDlHqTbCb *tbInfo,
692 PRIVATE Void rgSCHCmnFillHqPTb ARGS((
694 RgSchDlRbAlloc *rbAllocInfo,
700 PRIVATE Void rgSCHCmnDlGetBestFitHole ARGS((
709 #ifdef RGSCH_SPS_UNUSED
710 PRIVATE U32 rgSCHCmnGetRaType1Mask ARGS((
716 PRIVATE U32 rgSCHCmnGetRaType0Mask ARGS((
720 PRIVATE U32 rgSCHCmnGetRaType2Mask ARGS((
726 PUBLIC Bool rgSCHCmnRetxAllocAvoid ARGS((
729 RgSchDlHqProcCb *proc
732 PUBLIC U16 rgSCHCmnGetSiSetId ARGS((
740 //TODO_SID: Currently the table is only for 100 PRBs. Need to modify w.r.t. VRBG table 8.1.5.2.1-1 V5G_213
741 U32 rgSch5gtfTbSzTbl[MAX_5GTF_MCS] =
742 {1864, 5256, 8776, 13176, 17576, 21976, 26376, 31656, 35176, 39576, 43976, 47496, 52776, 59376, 66392};
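/* Illustrative sketch (not part of the original code): the table above is
 * indexed directly by the 5GTF MCS value and, per the TODO above, assumes a
 * 100-PRB allocation. A hypothetical bounds-checked lookup could look like: */
PRIVATE U32 rgSch5gtfExmplTbSz(U8 mcs)
{
   if (mcs >= MAX_5GTF_MCS)
   {
      return 0; /* out-of-range MCS: no TB size defined in the table above */
   }
   return rgSch5gtfTbSzTbl[mcs];
}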
744 U32 gUl5gtfSrRecv = 0;
745 U32 gUl5gtfBsrRecv = 0;
746 U32 gUl5gtfUeSchPick = 0;
747 U32 gUl5gtfPdcchSchd = 0;
748 U32 gUl5gtfAllocAllocated = 0;
749 U32 gUl5gtfUeRbAllocDone = 0;
750 U32 gUl5gtfUeRmvFnlzZeroBo = 0;
751 U32 gUl5gtfUeFnlzReAdd = 0;
752 U32 gUl5gtfPdcchSend = 0;
753 U32 gUl5gtfRbAllocFail = 0;
754 U32 ul5gtfsidUlMarkUl = 0;
755 U32 ul5gtfsidDlSchdPass = 0;
756 U32 ul5gtfsidDlAlreadyMarkUl = 0;
757 U32 ul5gtfTotSchdCnt = 0;
760 /* CQI Offset Index to Beta CQI Offset value mapping,
761 * stored as parts per 1000. Reserved is set to 0.
762 * Refer 36.213 sec 8.6.3 Tbl 8.6.3-3 */
763 PUBLIC U32 rgSchCmnBetaCqiOffstTbl[16] = {0, 0, 1125,
764 1250, 1375, 1625, 1750, 2000, 2250, 2500, 2875,
765 3125, 3500, 4000, 5000, 6250};
766 PUBLIC U32 rgSchCmnBetaHqOffstTbl[16] = {2000, 2500, 3125,
767 4000, 5000, 6250, 8000,10000, 12625, 15875, 20000,
768 31000, 50000,80000,126000,0};
769 PUBLIC U32 rgSchCmnBetaRiOffstTbl[16] = {1250, 1625, 2000,
770 2500, 3125, 4000, 5000, 6250, 8000, 10000, 12625, 15875, 20000, 0, 0, 0};
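/* Illustrative sketch (not part of the original code): the three beta offset
 * tables above store beta_offset * 1000 (36.213 sec 8.6.3), so a caller would
 * typically apply them with a fixed-point multiply and divide. Hypothetical
 * helper (name and usage are assumptions, not taken from this file): */
PRIVATE U32 rgSchCmnExmplApplyBetaCqi(U32 val, U8 cqiOffstIdx)
{
   /* Indices 0 and 1 of rgSchCmnBetaCqiOffstTbl are reserved and stored as 0 */
   if (cqiOffstIdx >= 16)
   {
      return 0;
   }
   return (val * rgSchCmnBetaCqiOffstTbl[cqiOffstIdx]) / 1000;
}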
772 PUBLIC S8 rgSchCmnDlCqiDiffOfst[8] = {0, 1, 2, 3, -4, -3, -2, -1};
774 /* Include CRS REs while calculating Efficiency */
775 CONSTANT PRIVATE U8 rgSchCmnAntIdx[5] = {0,0,1,0,2};
776 CONSTANT PRIVATE U8 rgSchCmnNumResForCrs[5] = {0,6,12,0,16};
783 PUBLIC S8 rgSchCmnApUeSelDiffCqi[4] = {1, 2, 3, 4};
784 PUBLIC S8 rgSchCmnApEnbConfDiffCqi[4] = {0, 1, 2, -1};
787 typedef struct rgSchCmnDlUeDciFrmtOptns
789 TfuDciFormat spfcDciFrmt; /* TM(Transmission Mode) specific DCI format.
790 * Search space : UE Specific by C-RNTI only. */
791 U8 spfcDciRAType; /* Resource Alloctn(RA) type for spfcDciFrmt */
792 TfuDciFormat prfrdDciFrmt; /* Preferred DCI format among the available
793 * options for TD (Transmit Diversity) */
794 U8 prfrdDciRAType; /* Resource Alloctn(RA) type for prfrdDciFrmt */
795 }RgSchCmnDlUeDciFrmtOptns;
798 /* DCI Format options for each Transmission Mode */
799 RgSchCmnDlUeDciFrmtOptns rgSchCmnDciFrmtOptns[7] = {
800 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
801 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
802 {TFU_DCI_FORMAT_2A,RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
803 {TFU_DCI_FORMAT_2, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
804 {TFU_DCI_FORMAT_1D,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
805 {TFU_DCI_FORMAT_1B,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
806 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2}
810 /* DCI Format options for each Transmission Mode */
811 RgSchCmnDlUeDciFrmtOptns rgSchCmnDciFrmtOptns[9] = {
812 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
813 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
814 {TFU_DCI_FORMAT_2A,RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
815 {TFU_DCI_FORMAT_2, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
816 {TFU_DCI_FORMAT_1D,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
817 {TFU_DCI_FORMAT_1B,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
818 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2}
823 typedef struct rgSchCmnDlImcsTbl
825 U8 modOdr; /* Modulation Order */
827 }RgSchCmnDlImcsTbl[29];
829 CONSTANT struct rgSchCmnMult235Info
831 U8 match; /* Closest number satisfying 2^a.3^b.5^c, with a bias
832 * towards the smaller number */
833 U8 prvMatch; /* Closest number not greater than array index
834 * satisfying 2^a.3^b.5^c */
835 } rgSchCmnMult235Tbl[110+1] = {
837 {1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}, {6, 6}, {6, 6}, {8, 8},
838 {9, 9}, {10, 10}, {10, 10}, {12, 12}, {12, 12}, {15, 12}, {15, 15},
839 {16, 16}, {16, 16}, {18, 18}, {18, 18}, {20, 20}, {20, 20}, {20, 20},
840 {24, 20}, {24, 24}, {25, 25}, {25, 25}, {27, 27}, {27, 27}, {30, 27},
841 {30, 30}, {30, 30}, {32, 32}, {32, 32}, {32, 32}, {36, 32}, {36, 36},
842 {36, 36}, {36, 36}, {40, 36}, {40, 40}, {40, 40}, {40, 40}, {45, 40},
843 {45, 40}, {45, 45}, {45, 45}, {48, 45}, {48, 48}, {48, 48}, {50, 50},
844 {50, 50}, {50, 50}, {54, 50}, {54, 54}, {54, 54}, {54, 54}, {54, 54},
845 {60, 54}, {60, 54}, {60, 60}, {60, 60}, {60, 60}, {64, 60}, {64, 64},
846 {64, 64}, {64, 64}, {64, 64}, {64, 64}, {72, 64}, {72, 64}, {72, 64},
847 {72, 72}, {72, 72}, {75, 72}, {75, 75}, {75, 75}, {75, 75}, {80, 75},
848 {80, 75}, {80, 80}, {81, 81}, {81, 81}, {81, 81}, {81, 81}, {81, 81},
849 {90, 81}, {90, 81}, {90, 81}, {90, 81}, {90, 90}, {90, 90}, {90, 90},
850 {90, 90}, {96, 90}, {96, 90}, {96, 96}, {96, 96}, {96, 96}, {100, 96},
851 {100, 100}, {100, 100}, {100, 100}, {100, 100}, {100, 100}, {108, 100},
852 {108, 100}, {108, 100}, {108, 108}, {108, 108}, {108, 108}
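/* Illustrative sketch (not part of the original code): rgSchCmnMult235Tbl
 * rounds a PRB count to a nearby value of the form 2^a * 3^b * 5^c, the only
 * sizes usable by the PUSCH DFT. A hypothetical checker for that property: */
PRIVATE Bool rgSchCmnExmplIsMult235(U8 n)
{
   if (n == 0)
   {
      return FALSE;
   }
   while ((n % 2) == 0) n = n / 2;
   while ((n % 3) == 0) n = n / 3;
   while ((n % 5) == 0) n = n / 5;
   return (n == 1) ? TRUE : FALSE;
}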
856 /* BI table from 36.321 Table 7.2.1 */
857 CONSTANT PRIVATE S16 rgSchCmnBiTbl[RG_SCH_CMN_NUM_BI_VAL] = {
858 0, 10, 20, 30,40,60,80,120,160,240,320,480,960};
859 PUBLIC RgSchCmnUlCqiInfo rgSchCmnUlCqiTbl[RG_SCH_CMN_UL_NUM_CQI] = {
861 {RGSCH_CMN_QM_CQI_1,RGSCH_CMN_UL_EFF_CQI_1 },
862 {RGSCH_CMN_QM_CQI_2,RGSCH_CMN_UL_EFF_CQI_2 },
863 {RGSCH_CMN_QM_CQI_3,RGSCH_CMN_UL_EFF_CQI_3 },
864 {RGSCH_CMN_QM_CQI_4,RGSCH_CMN_UL_EFF_CQI_4 },
865 {RGSCH_CMN_QM_CQI_5,RGSCH_CMN_UL_EFF_CQI_5 },
866 {RGSCH_CMN_QM_CQI_6,RGSCH_CMN_UL_EFF_CQI_6 },
867 {RGSCH_CMN_QM_CQI_7,RGSCH_CMN_UL_EFF_CQI_7 },
868 {RGSCH_CMN_QM_CQI_8,RGSCH_CMN_UL_EFF_CQI_8 },
869 {RGSCH_CMN_QM_CQI_9,RGSCH_CMN_UL_EFF_CQI_9 },
870 {RGSCH_CMN_QM_CQI_10,RGSCH_CMN_UL_EFF_CQI_10 },
871 {RGSCH_CMN_QM_CQI_11,RGSCH_CMN_UL_EFF_CQI_11 },
872 {RGSCH_CMN_QM_CQI_12,RGSCH_CMN_UL_EFF_CQI_12 },
873 {RGSCH_CMN_QM_CQI_13,RGSCH_CMN_UL_EFF_CQI_13 },
874 {RGSCH_CMN_QM_CQI_14,RGSCH_CMN_UL_EFF_CQI_14 },
875 {RGSCH_CMN_QM_CQI_15,RGSCH_CMN_UL_EFF_CQI_15 },
879 /* This table maps a (delta_offset * 2 + 2) to a (beta * 8)
880  * where beta is 10^-(delta_offset/10) rounded off to nearest 1/8 */
882 PRIVATE U16 rgSchCmnUlBeta8Tbl[29] = {
883 6, RG_SCH_CMN_UL_INVALID_BETA8, 8, 9, 10, 11, 13, 14, 16, 18, 20, 23,
884 25, 28, 32, RG_SCH_CMN_UL_INVALID_BETA8, 40, RG_SCH_CMN_UL_INVALID_BETA8,
885 50, RG_SCH_CMN_UL_INVALID_BETA8, 64, RG_SCH_CMN_UL_INVALID_BETA8, 80,
886 RG_SCH_CMN_UL_INVALID_BETA8, 101, RG_SCH_CMN_UL_INVALID_BETA8, 127,
887 RG_SCH_CMN_UL_INVALID_BETA8, 160
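/* Illustrative sketch (not part of the original code): reading the table
 * above with the mapping described before it, index (delta_offset * 2 + 2) = 2
 * holds 8, i.e. beta = 8/8 = 1.0, and index 4 holds 10, i.e. beta = 10/8 =
 * 1.25. A hypothetical accessor that guards against reserved entries: */
PRIVATE U16 rgSchCmnExmplUlBeta8(U8 idx)
{
   if (idx >= 29)
   {
      return RG_SCH_CMN_UL_INVALID_BETA8;
   }
   return rgSchCmnUlBeta8Tbl[idx];
}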
891 /* QCI to SVC priority mapping. Index specifies the Qci*/
892 PRIVATE U8 rgSchCmnDlQciPrio[RG_SCH_CMN_MAX_QCI] = RG_SCH_CMN_QCI_TO_PRIO;
894 /* The configuration is efficiency measured per 1024 REs. */
895 /* The first element stands for when CQI is not known */
896 /* This table is used to translate CQI to its corresponding */
897 /* allocation parameters. These are currently from 36.213    */
898 /* Just this table needs to be edited for modifying          */
899 /* the resource allocation behaviour                          */
901 /* ADD CQI to MCS mapping correction
902 * single dimensional array is replaced by 2 dimensions for different CFI*/
903 PRIVATE U16 rgSchCmnCqiPdschEff[4][16] = {RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI0 ,RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI1,
904 RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI2,RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI3};
906 PRIVATE U16 rgSchCmn2LyrCqiPdschEff[4][16] = {RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI0 ,RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI1,
907 RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI2, RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI3};
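/* Illustrative sketch (not part of the original code): the efficiency tables
 * above are expressed per 1024 REs, so a rough single-codeword payload
 * estimate for a given CFI and CQI would scale the available REs by the table
 * entry. Hypothetical helper (name and rounding are assumptions): */
PRIVATE U32 rgSchCmnExmplPdschBits(U32 numRe, U8 cfi, U8 cqi)
{
   /* cfi indexes the per-CFI row (0..3); cqi is 0..15, where 0 = CQI unknown */
   return (numRe * rgSchCmnCqiPdschEff[cfi][cqi]) / 1024;
}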
909 /* This configuration determines the translation of a UE's CQI to its */
910 /* PDCCH coding efficiency. This may be edited based on the installation */
911 PRIVATE U8 rgSchCmnDlRvTbl[4] = {0, 2, 3, 1}; /* RVIdx sequence is corrected*/
913 /* Indexed by [DciFrmt].
914  * Considering the following definition in determining the dciFrmt index. */
929 PRIVATE U16 rgSchCmnDciFrmtSizes[10];
932 PRIVATE U16 rgSchCmnCqiPdcchEff[16] = RG_SCH_CMN_CQI_TO_PDCCH_EFF;
936 PUBLIC RgSchTddUlDlSubfrmTbl rgSchTddUlDlSubfrmTbl = {
937 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME},
938 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
939 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
940 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
941 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
942 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
943 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME}
948 PUBLIC U8 rgSchTddSpsDlMaxRetxTbl[RGSCH_MAX_TDD_UL_DL_CFG] = {
960 /* Special Subframes in OFDM symbols */
961 /* ccpu00134197-MOD-Correct the number of symbols */
962 PUBLIC RgSchTddSplSubfrmInfoTbl rgSchTddSplSubfrmInfoTbl = {
966 {11, 1, 1, 10, 1, 1},
974 /* PHICH 'm' value Table */
975 PUBLIC RgSchTddPhichMValTbl rgSchTddPhichMValTbl = {
976 {2, 1, 0, 0, 0, 2, 1, 0, 0, 0},
977 {0, 1, 0, 0, 1, 0, 1, 0, 0, 1},
978 {0, 0, 0, 1, 0, 0, 0, 0, 1, 0},
979 {1, 0, 0, 0, 0, 0, 0, 0, 1, 1},
980 {0, 0, 0, 0, 0, 0, 0, 0, 1, 1},
981 {0, 0, 0, 0, 0, 0, 0, 0, 1, 0},
982 {1, 1, 0, 0, 0, 1, 1, 0, 0, 1}
985 /* PHICH 'K' value Table */
986 PUBLIC RgSchTddKPhichTbl rgSchTddKPhichTbl = {
987 {0, 0, 4, 7, 6, 0, 0, 4, 7, 6},
988 {0, 0, 4, 6, 0, 0, 0, 4, 6, 0},
989 {0, 0, 6, 0, 0, 0, 0, 6, 0, 0},
990 {0, 0, 6, 6, 6, 0, 0, 0, 0, 0},
991 {0, 0, 6, 6, 0, 0, 0, 0, 0, 0},
992 {0, 0, 6, 0, 0, 0, 0, 0, 0, 0},
993 {0, 0, 4, 6, 6, 0, 0, 4, 7, 0}
996 /* Uplink association index 'K' value Table */
997 PUBLIC RgSchTddUlAscIdxKDashTbl rgSchTddUlAscIdxKDashTbl = {
998 {0, 0, 6, 4, 0, 0, 0, 6, 4, 0},
999 {0, 0, 4, 0, 0, 0, 0, 4, 0, 0},
1000 {0, 0, 4, 4, 4, 0, 0, 0, 0, 0},
1001 {0, 0, 4, 4, 0, 0, 0, 0, 0, 0},
1002 {0, 0, 4, 0, 0, 0, 0, 0, 0, 0},
1003 {0, 0, 7, 7, 5, 0, 0, 7, 7, 0}
1007 /* PUSCH 'K' value Table */
1008 PUBLIC RgSchTddPuschTxKTbl rgSchTddPuschTxKTbl = {
1009 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
1010 {0, 6, 0, 0, 4, 0, 6, 0, 0, 4},
1011 {0, 0, 0, 4, 0, 0, 0, 0, 4, 0},
1012 {4, 0, 0, 0, 0, 0, 0, 0, 4, 4},
1013 {0, 0, 0, 0, 0, 0, 0, 0, 4, 4},
1014 {0, 0, 0, 0, 0, 0, 0, 0, 4, 0},
1015 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1018 /* PDSCH to PUCCH Table for DL HARQ feedback. Based on the
1019 Downlink association set index 'K' table */
1020 PUBLIC U8 rgSchTddPucchTxTbl[7][10] = {
1021 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
1022 {7, 6, 0, 0, 4, 7, 6, 0, 0, 4},
1023 {7, 6, 0, 4, 8, 7, 6, 0, 4, 8},
1024 {4, 11, 0, 0, 0, 7, 6, 6, 5, 5},
1025 {12, 11, 0, 0, 8, 7, 7, 6, 5, 4},
1026 {12, 11, 0, 9, 8, 7, 6, 5, 4, 13},
1027 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1030 /* Table to fetch the next DL sf idx for applying the
1031 new CFI. The next Dl sf Idx at which the new CFI
1032    is applied is always the starting Sf of the next ACK/NACK bundle.
1035 Ex: In Cfg-2, sf4 and sf9 are the only subframes at which
1036 a new ACK/NACK bundle of DL subframes can start
1038 D S U D D D S U D D D S U D D D S U D D
1041 dlSf Array for Cfg-2:
1042 sfNum: 0 1 3 4 5 6 8 9 0 1 3 4 5 6 8 9
1043     sfIdx:  0  1  2  3  4  5  6  7  8  9  10 11 12 13 14 15
1045 If CFI changes at sf0, nearest DL SF bundle >= 4 TTI is sf4
1046 So at sf4 the new CFI can be applied. To arrive at sf4 from
1047 sf0, the sfIdx has to be increased by 3 */
1049 PUBLIC U8 rgSchTddPdcchSfIncTbl[7][10] = {
1050 /* A/N Bundl: 0,1,5,6*/ {2, 1, 0, 0, 0, 2, 1, 0, 0, 0},
1051 /* A/N Bundl: 0,4,5,9*/ {2, 2, 0, 0, 3, 2, 2, 0, 0, 3},
1052 /* A/N Bundl: 4,9*/ {3, 6, 0, 5, 4, 3, 6, 0, 5, 4},
1053 /* A/N Bundl: 1,7,9*/ {4, 3, 0, 0, 0, 4, 5, 4, 6, 5},
1054 /* A/N Bundl: 0,6*/ {4, 3, 0, 0, 6, 5, 4, 7, 6, 5},
1055 /* A/N Bundl: 9*/ {8, 7, 0, 6, 5, 4, 12, 11, 10, 9},
1056 /* A/N Bundl: 0,1,5,6,9*/ {2, 1, 0, 0, 0, 2, 2, 0, 0, 3}
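/* Illustrative sketch (not part of the original code): per the description
 * above, the entry from rgSchTddPdcchSfIncTbl is added to the current dlSf
 * array index to land on the first DL subframe of the next ACK/NACK bundle,
 * where a new CFI can take effect. Wrap-around over the dlSf array is an
 * assumption here, not taken from this file: */
PRIVATE U8 rgSchCmnExmplNextCfiApplIdx(U8 ulDlCfg, U8 sfNum, U8 curDlSfIdx, U8 numDlSfs)
{
   return (U8)((curDlSfIdx + rgSchTddPdcchSfIncTbl[ulDlCfg][sfNum]) % numDlSfs);
}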
1060 /* combine compilation fixes */
1062 /* subframe offset values to be used when twoIntervalsConfig is enabled in UL SPS */
1064 PUBLIC RgSchTddSfOffTbl rgSchTddSfOffTbl = {
1065 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
1066 {0, 0, 1, -1, 0, 0, 0, 1, -1, 0},
1067 {0, 0, 5, 0, 0, 0, 0, -5, 0, 0},
1068 {0, 0, 1, 1, -2, 0, 0, 0, 0, 0},
1069 {0, 0, 1, -1, 0, 0, 0, 0, 0, 0},
1070 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
1071 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
1075 /* Table to determine when uplink SPS configured grants should
1076  * explicitly be reserved in a subframe. When entries are the same
1077  * as those of Msg3SubfrmTbl, it indicates competition with msg3.
1078  * As of now, this is the same as Msg3SubfrmTbl (leaving out uldlcfg 2),
1079 * except that all 255s are now zeros. */
1080 PUBLIC RgSchTddSpsUlRsrvTbl rgSchTddSpsUlRsrvTbl = {
1081 {0, 0, 0, 6, 8, 0, 0, 0, 6, 8},
1082 {0, 0, 6, 9, 0, 0, 0, 6, 9, 0},
1083 {0, 0, 10, 0, 0, 0, 0, 10, 0, 0},
1084 {0, 0, 0, 0, 8, 0, 7, 7, 14, 0},
1085 {0, 0, 0, 9, 0, 0, 7, 15, 0, 0},
1086 {0, 0, 10, 0, 0, 0, 16, 0, 0, 0},
1087 {0, 0, 0, 0, 8, 0, 0, 0, 9, 0}
1090 /* Inverse DL Assoc Set index Table */
1091 PUBLIC RgSchTddInvDlAscSetIdxTbl rgSchTddInvDlAscSetIdxTbl = {
1092 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
1093 {7, 6, 0, 0, 4, 7, 6, 0, 0, 4},
1094 {7, 6, 0, 4, 8, 7, 6, 0, 4, 8},
1095 {4, 11, 0, 0, 0, 7, 6, 6, 5, 5},
1096 {12, 11, 0, 0, 8, 7, 7, 6, 5, 4},
1097 {12, 11, 0, 9, 8, 7, 6, 5, 4, 13},
1098 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1101 #endif /* (LTEMAC_SPS ) */
1103 /* Number of Uplink subframes Table */
1104 PRIVATE U8 rgSchTddNumUlSf[] = {6, 4, 2, 3, 2, 1, 5};
1106 /* Uplink HARQ processes Table */
1107 PUBLIC RgSchTddUlNumHarqProcTbl rgSchTddUlNumHarqProcTbl = { 7, 4, 2, 3, 2, 1, 6};
1109 /* Downlink HARQ processes Table */
1110 PUBLIC RgSchTddDlNumHarqProcTbl rgSchTddDlNumHarqProcTbl = { 4, 7, 10, 9, 12, 15, 6};
1112 /* Downlink association index set 'K' value Table */
1113 PUBLIC RgSchTddDlAscSetIdxKTbl rgSchTddDlAscSetIdxKTbl = {
1114 { {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}}, {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}} },
1116 { {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}}, {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}} },
1118 { {0, {0}}, {0, {0}}, {4, {8, 7, 4, 6}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {4, {8, 7, 4, 6}}, {0, {0}}, {0, {0}} },
1120 { {0, {0}}, {0, {0}}, {3, {7, 6, 11}}, {2, {6, 5}}, {2, {5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1122 { {0, {0}}, {0, {0}}, {4, {12, 8, 7, 11}}, {4, {6, 5, 4, 7}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1124 { {0, {0}}, {0, {0}}, {9, {13, 12, 9, 8, 7, 5, 4, 11, 6}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1126 { {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {1, {5}}, {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {0, {0}} }
1129 /* ccpu132282-ADD-the table rgSchTddDlAscSetIdxKTbl is rearranged in
1130 * decreasing order of Km, this is used to calculate the NCE used for
1131 * calculating N1Pucch Resource for Harq*/
1132 PUBLIC RgSchTddDlAscSetIdxKTbl rgSchTddDlHqPucchResCalTbl = {
1133 { {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}}, {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}} },
1135 { {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}}, {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}} },
1137 { {0, {0}}, {0, {0}}, {4, {8, 7, 6, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {4, {8, 7, 6, 4}}, {0, {0}}, {0, {0}} },
1139 { {0, {0}}, {0, {0}}, {3, {11, 7, 6}}, {2, {6, 5}}, {2, {5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1141 { {0, {0}}, {0, {0}}, {4, {12, 11, 8, 7}}, {4, {7, 6, 5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1143 { {0, {0}}, {0, {0}}, {9, {13, 12, 11, 9, 8, 7, 6, 5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1145 { {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {1, {5}}, {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {0, {0}} }
1148 /* Minimum number of Ack/Nack feedback information to be
1149 stored for each UL-DL configuration */
1150 PUBLIC RgSchTddANFdbkMapTbl rgSchTddANFdbkMapTbl = {4, 4, 2, 3, 2, 1, 5};
1152 /* Uplink switch points and number of UL subframes Table */
1153 PUBLIC RgSchTddMaxUlSubfrmTbl rgSchTddMaxUlSubfrmTbl = {
1154 {2,3,3}, {2,2,2}, {2,1,1}, {1,3,0}, {1,2,0}, {1,1,0}, {2,3,2}
1157 /* Uplink switch points and number of DL subframes Table */
1158 PUBLIC RgSchTddMaxDlSubfrmTbl rgSchTddMaxDlSubfrmTbl = {
1159 {2,2,2}, {2,3,3}, {2,4,4}, {1,7,0}, {1,8,0}, {1,9,0}, {2,2,3}
1162 /* Number of UL subframes present before a particular subframe */
1163 PUBLIC RgSchTddNumUlSubfrmTbl rgSchTddNumUlSubfrmTbl = {
1164 {0, 0, 1, 2, 3, 3, 3, 4, 5, 6},
1165 {0, 0, 1, 2, 2, 2, 2, 3, 4, 4},
1166 {0, 0, 1, 1, 1, 1, 1, 2, 2, 2},
1167 {0, 0, 1, 2, 3, 3, 3, 3, 3, 3},
1168 {0, 0, 1, 2, 2, 2, 2, 2, 2, 2},
1169 {0, 0, 1, 1, 1, 1, 1, 1, 1, 1},
1170 {0, 0, 1, 2, 3, 3, 3, 4, 5, 5}
1173 /* Number of DL subframes present till a particular subframe */
1174 PUBLIC RgSchTddNumDlSubfrmTbl rgSchTddNumDlSubfrmTbl = {
1175 {1, 2, 2, 2, 2, 3, 4, 4, 4, 4},
1176 {1, 2, 2, 2, 3, 4, 5, 5, 5, 6},
1177 {1, 2, 2, 3, 4, 5, 6, 6, 7, 8},
1178 {1, 2, 2, 2, 2, 3, 4, 5, 6, 7},
1179 {1, 2, 2, 2, 3, 4, 5, 6, 7, 8},
1180 {1, 2, 2, 3, 4, 5, 6, 7, 8, 9},
1181 {1, 2, 2, 2, 2, 3, 4, 4, 4, 5}
1185 /* Nearest possible DL subframe Index from UL subframe
1186 * DL Index < UL Index */
1187 PUBLIC RgSchTddLowDlSubfrmIdxTbl rgSchTddLowDlSubfrmIdxTbl = {
1188 {0, 1, 1, 1, 1, 5, 6, 6, 6, 6},
1189 {0, 1, 1, 1, 4, 5, 6, 6, 6, 9},
1190 {0, 1, 1, 3, 4, 5, 6, 6, 8, 9},
1191 {0, 1, 1, 1, 1, 5, 6, 7, 8, 9},
1192 {0, 1, 1, 1, 4, 5, 6, 7, 8, 9},
1193 {0, 1, 1, 3, 4, 5, 6, 7, 8, 9},
1194 {0, 1, 1, 1, 1, 5, 6, 6, 6, 9}
1197 /* Nearest possible DL subframe Index from UL subframe
1198 * DL Index > UL Index
1199 * 10 represents Next SFN low DL Idx */
1200 PUBLIC RgSchTddHighDlSubfrmIdxTbl rgSchTddHighDlSubfrmIdxTbl = {
1201 {0, 1, 5, 5, 5, 5, 6, 10, 10, 10},
1202 {0, 1, 4, 4, 4, 5, 6, 9, 9, 9},
1203 {0, 1, 3, 3, 4, 5, 6, 8, 8, 9},
1204 {0, 1, 5, 5, 5, 5, 6, 7, 8, 9},
1205 {0, 1, 4, 4, 4, 5, 6, 7, 8, 9},
1206 {0, 1, 3, 3, 4, 5, 6, 7, 8, 9},
1207 {0, 1, 5, 5, 5, 5, 6, 9, 9, 9}
1210 /* RACH Message3 related information */
1211 PUBLIC RgSchTddMsg3SubfrmTbl rgSchTddMsg3SubfrmTbl = {
1212 {7, 6, 255, 255, 255, 7, 6, 255, 255, 255},
1213 {7, 6, 255, 255, 8, 7, 6, 255, 255, 8},
1214 {7, 6, 255, 9, 8, 7, 6, 255, 9, 8},
1215 {12, 11, 255, 255, 255, 7, 6, 6, 6, 13},
1216 {12, 11, 255, 255, 8, 7, 6, 6, 14, 13},
1217 {12, 11, 255, 9, 8, 7, 6, 15, 14, 13},
1218 {7, 6, 255, 255, 255, 7, 6, 255, 255, 8}
1221 /* ccpu00132341-DEL Removed rgSchTddRlsDlSubfrmTbl and used Kset table for
1222 * releasing DL HARQs */
1224 /* DwPTS Scheduling Changes Start */
1225 /* Provides the number of Cell Reference Signals in DwPTS */
1227 PRIVATE U8 rgSchCmnDwptsCrs[2][3] = {/* [Spl Sf cfg][Ant Port] */
1228 {4, 8, 16}, /* Spl Sf cfg 1,2,3,6,7,8 */
1229 {6, 12, 20}, /* Spl Sf cfg 4 */
1232 PRIVATE S8 rgSchCmnSplSfDeltaItbs[9] = RG_SCH_DWPTS_ITBS_ADJ;
1233 /* DwPTS Scheduling Changes End */
1237 PRIVATE U32 rgSchCmnBsrTbl[64] = {
1238 0, 10, 12, 14, 17, 19, 22, 26,
1239 31, 36, 42, 49, 57, 67, 78, 91,
1240 107, 125, 146, 171, 200, 234, 274, 321,
1241 376, 440, 515, 603, 706, 826, 967, 1132,
1242 1326, 1552, 1817, 2127, 2490, 2915, 3413, 3995,
1243 4677, 5476, 6411, 7505, 8787, 10287, 12043, 14099,
1244 16507, 19325, 22624, 26487, 31009, 36304, 42502, 49759,
1245 58255, 68201, 79846, 93479, 109439, 128125, 150000, 220000
1248 PRIVATE U32 rgSchCmnExtBsrTbl[64] = {
1249 0, 10, 13, 16, 19, 23, 29, 35,
1250 43, 53, 65, 80, 98, 120, 147, 181,
1251 223, 274, 337, 414, 509, 625, 769, 945,
1252 1162, 1429, 1757, 2161, 2657, 3267, 4017, 4940,
1253 6074, 7469, 9185, 11294, 13888, 17077, 20999, 25822,
1254 31752, 39045, 48012, 59039, 72598, 89272, 109774, 134986,
1255 165989, 204111, 250990, 308634, 379519, 466683, 573866, 705666,
1256 867737, 1067031, 1312097, 1613447, 1984009, 2439678, 3000000, 3100000
1259 PUBLIC U8 rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_MAX_CP][RG_SCH_CMN_UL_NUM_CQI];
1261 PUBLIC RgSchTbSzTbl rgTbSzTbl = {
1263 {16, 32, 56, 88, 120, 152, 176, 208, 224, 256, 288, 328, 344, 376, 392, 424, 456, 488, 504, 536, 568, 600, 616, 648, 680, 712, 744, 776, 776, 808, 840, 872, 904, 936, 968, 1000, 1032, 1032, 1064, 1096, 1128, 1160, 1192, 1224, 1256, 1256, 1288, 1320, 1352, 1384, 1416, 1416, 1480, 1480, 1544, 1544, 1608, 1608, 1608, 1672, 1672, 1736, 1736, 1800, 1800, 1800, 1864, 1864, 1928, 1928, 1992, 1992, 2024, 2088, 2088, 2088, 2152, 2152, 2216, 2216, 2280, 2280, 2280, 2344, 2344, 2408, 2408, 2472, 2472, 2536, 2536, 2536, 2600, 2600, 2664, 2664, 2728, 2728, 2728, 2792, 2792, 2856, 2856, 2856, 2984, 2984, 2984, 2984, 2984, 3112},
1264 {24, 56, 88, 144, 176, 208, 224, 256, 328, 344, 376, 424, 456, 488, 520, 568, 600, 632, 680, 712, 744, 776, 808, 872, 904, 936, 968, 1000, 1032, 1064, 1128, 1160, 1192, 1224, 1256, 1288, 1352, 1384, 1416, 1416, 1480, 1544, 1544, 1608, 1608, 1672, 1736, 1736, 1800, 1800, 1864, 1864, 1928, 1992, 1992, 2024, 2088, 2088, 2152, 2152, 2216, 2280, 2280, 2344, 2344, 2408, 2472, 2472, 2536, 2536, 2600, 2600, 2664, 2728, 2728, 2792, 2792, 2856, 2856, 2856, 2984, 2984, 2984, 3112, 3112, 3112, 3240, 3240, 3240, 3240, 3368, 3368, 3368, 3496, 3496, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3752, 3752, 3880, 3880, 3880, 4008, 4008, 4008},
1265 {32, 72, 144, 176, 208, 256, 296, 328, 376, 424, 472, 520, 568, 616, 648, 696, 744, 776, 840, 872, 936, 968, 1000, 1064, 1096, 1160, 1192, 1256, 1288, 1320, 1384, 1416, 1480, 1544, 1544, 1608, 1672, 1672, 1736, 1800, 1800, 1864, 1928, 1992, 2024, 2088, 2088, 2152, 2216, 2216, 2280, 2344, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2664, 2728, 2792, 2856, 2856, 2856, 2984, 2984, 3112, 3112, 3112, 3240, 3240, 3240, 3368, 3368, 3368, 3496, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 3880, 4008, 4008, 4008, 4136, 4136, 4136, 4264, 4264, 4264, 4392, 4392, 4392, 4584, 4584, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968},
1266 {40, 104, 176, 208, 256, 328, 392, 440, 504, 568, 616, 680, 744, 808, 872, 904, 968, 1032, 1096, 1160, 1224, 1256, 1320, 1384, 1416, 1480, 1544, 1608, 1672, 1736, 1800, 1864, 1928, 1992, 2024, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2728, 2792, 2856, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3368, 3368, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968, 4968, 5160, 5160, 5160, 5352, 5352, 5352, 5352, 5544, 5544, 5544, 5736, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 6200, 6200, 6200, 6200, 6456, 6456},
1267 {56, 120, 208, 256, 328, 408, 488, 552, 632, 696, 776, 840, 904, 1000, 1064, 1128, 1192, 1288, 1352, 1416, 1480, 1544, 1608, 1736, 1800, 1864, 1928, 1992, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2600, 2664, 2728, 2792, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3368, 3496, 3496, 3624, 3624, 3752, 3752, 3880, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4968, 4968, 4968, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7480, 7736, 7736, 7736, 7992},
1268 {72, 144, 224, 328, 424, 504, 600, 680, 776, 872, 968, 1032, 1128, 1224, 1320, 1384, 1480, 1544, 1672, 1736, 1864, 1928, 2024, 2088, 2216, 2280, 2344, 2472, 2536, 2664, 2728, 2792, 2856, 2984, 3112, 3112, 3240, 3368, 3496, 3496, 3624, 3752, 3752, 3880, 4008, 4008, 4136, 4264, 4392, 4392, 4584, 4584, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9528},
1269 {328, 176, 256, 392, 504, 600, 712, 808, 936, 1032, 1128, 1224, 1352, 1480, 1544, 1672, 1736, 1864, 1992, 2088, 2216, 2280, 2408, 2472, 2600, 2728, 2792, 2984, 2984, 3112, 3240, 3368, 3496, 3496, 3624, 3752, 3880, 4008, 4136, 4136, 4264, 4392, 4584, 4584, 4776, 4776, 4968, 4968, 5160, 5160, 5352, 5352, 5544, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6456, 6456, 6456, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10296, 10680, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448},
1270 {104, 224, 328, 472, 584, 712, 840, 968, 1096, 1224, 1320, 1480, 1608, 1672, 1800, 1928, 2088, 2216, 2344, 2472, 2536, 2664, 2792, 2984, 3112, 3240, 3368, 3368, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4584, 4584, 4776, 4968, 4968, 5160, 5352, 5352, 5544, 5736, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11448, 11832, 11832, 11832, 12216, 12216, 12216, 12576, 12576, 12576, 12960, 12960, 12960, 12960, 13536, 13536},
1271 {120, 256, 392, 536, 680, 808, 968, 1096, 1256, 1384, 1544, 1672, 1800, 1928, 2088, 2216, 2344, 2536, 2664, 2792, 2984, 3112, 3240, 3368, 3496, 3624, 3752, 3880, 4008, 4264, 4392, 4584, 4584, 4776, 4968, 4968, 5160, 5352, 5544, 5544, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8504, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 12216, 12216, 12216, 12576, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15264},
1272 {136, 296, 456, 616, 776, 936, 1096, 1256, 1416, 1544, 1736, 1864, 2024, 2216, 2344, 2536, 2664, 2856, 2984, 3112, 3368, 3496, 3624, 3752, 4008, 4136, 4264, 4392, 4584, 4776, 4968, 5160, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6712, 6968, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8248, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11832, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16416, 16992, 16992, 16992, 16992, 17568},
1273 {144, 328, 504, 680, 872, 1032, 1224, 1384, 1544, 1736, 1928, 2088, 2280, 2472, 2664, 2792, 2984, 3112, 3368, 3496, 3752, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8504, 8504, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080},
1274 {176, 376, 584, 776, 1000, 1192, 1384, 1608, 1800, 2024, 2216, 2408, 2600, 2792, 2984, 3240, 3496, 3624, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5352, 5544, 5736, 5992, 5992, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7736, 7736, 7992, 8248, 8504, 8760, 8760, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 22152, 22152, 22152},
1275 {208, 440, 680, 904, 1128, 1352, 1608, 1800, 2024, 2280, 2472, 2728, 2984, 3240, 3368, 3624, 3880, 4136, 4392, 4584, 4776, 4968, 5352, 5544, 5736, 5992, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 8760, 9144, 9528, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11064, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 23688, 23688, 24496, 24496, 24496, 24496, 25456},
1276 {224, 488, 744, 1000, 1256, 1544, 1800, 2024, 2280, 2536, 2856, 3112, 3368, 3624, 3880, 4136, 4392, 4584, 4968, 5160, 5352, 5736, 5992, 6200, 6456, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 26416, 26416, 26416, 26416, 27376, 27376, 27376, 27376, 28336, 28336},
1277 {256, 552, 840, 1128, 1416, 1736, 1992, 2280, 2600, 2856, 3112, 3496, 3752, 4008, 4264, 4584, 4968, 5160, 5544, 5736, 5992, 6200, 6456, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704},
1278 {280, 600, 904, 1224, 1544, 1800, 2152, 2472, 2728, 3112, 3368, 3624, 4008, 4264, 4584, 4968, 5160, 5544, 5736, 6200, 6456, 6712, 6968, 7224, 7736, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 11832, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008},
1279 {328, 632, 968, 1288, 1608, 1928, 2280, 2600, 2984, 3240, 3624, 3880, 4264, 4584, 4968, 5160, 5544, 5992, 6200, 6456, 6712, 7224, 7480, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160, 35160, 35160, 35160},
1280 {336, 696, 1064, 1416, 1800, 2152, 2536, 2856, 3240, 3624, 4008, 4392, 4776, 5160, 5352, 5736, 6200, 6456, 6712, 7224, 7480, 7992, 8248, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 39232},
1281 {376, 776, 1160, 1544, 1992, 2344, 2792, 3112, 3624, 4008, 4392, 4776, 5160, 5544, 5992, 6200, 6712, 7224, 7480, 7992, 8248, 8760, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 14112, 14112, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816},
1282 {408, 840, 1288, 1736, 2152, 2600, 2984, 3496, 3880, 4264, 4776, 5160, 5544, 5992, 6456, 6968, 7224, 7736, 8248, 8504, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 15264, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 18336, 19080, 19080, 19848, 20616, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888},
1283 {440, 904, 1384, 1864, 2344, 2792, 3240, 3752, 4136, 4584, 5160, 5544, 5992, 6456, 6968, 7480, 7992, 8248, 8760, 9144, 9912, 10296, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 20616, 21384, 22152, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 48936, 51024, 51024, 51024},
1284 {488, 1000, 1480, 1992, 2472, 2984, 3496, 4008, 4584, 4968, 5544, 5992, 6456, 6968, 7480, 7992, 8504, 9144, 9528, 9912, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 15840, 16416, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056},
1285 {520, 1064, 1608, 2152, 2664, 3240, 3752, 4264, 4776, 5352, 5992, 6456, 6968, 7480, 7992, 8504, 9144, 9528, 10296, 10680, 11448, 11832, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 19080, 19080, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256},
1286 {552, 1128, 1736, 2280, 2856, 3496, 4008, 4584, 5160, 5736, 6200, 6968, 7480, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22152, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776},
1287 {584, 1192, 1800, 2408, 2984, 3624, 4264, 4968, 5544, 5992, 6712, 7224, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22920, 22920, 23688, 24496, 25456, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 29296, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 66592},
1288 {616, 1256, 1864, 2536, 3112, 3752, 4392, 5160, 5736, 6200, 6968, 7480, 8248, 8760, 9528, 10296, 10680, 11448, 12216, 12576, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 24496, 24496, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 29296, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 66592, 68808, 68808, 68808, 71112},
1289 {712, 1480, 2216, 2984, 3752, 4392, 5160, 5992, 6712, 7480, 8248, 8760, 9528, 10296, 11064, 11832, 12576, 13536, 14112, 14688, 15264, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 25456, 26416, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376}
1292 {32, 88, 152, 208, 256, 328, 376, 424, 488, 536, 600, 648, 712, 776, 808, 872, 936, 1000, 1032, 1096, 1160, 1224, 1256, 1320, 1384, 1416, 1480, 1544, 1608, 1672, 1736, 1800, 1800, 1864, 1928, 1992, 2088, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2728, 2792, 2856, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3240, 3368, 3368, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 4008, 4008, 4008, 4136, 4136, 4136, 4264, 4264, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 5992, 6200},
1293 {56, 144, 208, 256, 344, 424, 488, 568, 632, 712, 776, 872, 936, 1000, 1064, 1160, 1224, 1288, 1384, 1416, 1544, 1608, 1672, 1736, 1800, 1864, 1992, 2024, 2088, 2152, 2280, 2344, 2408, 2472, 2536, 2600, 2728, 2792, 2856, 2856, 2984, 3112, 3112, 3240, 3240, 3368, 3496, 3496, 3624, 3624, 3752, 3752, 3880, 4008, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4584, 4584, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5160, 5160, 5352, 5544, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992},
1294 {72, 176, 256, 328, 424, 520, 616, 696, 776, 872, 968, 1064, 1160, 1256, 1320, 1416, 1544, 1608, 1672, 1800, 1864, 1992, 2088, 2152, 2216, 2344, 2408, 2536, 2600, 2664, 2792, 2856, 2984, 3112, 3112, 3240, 3368, 3368, 3496, 3624, 3624, 3752, 3880, 4008, 4008, 4136, 4264, 4264, 4392, 4584, 4584, 4584, 4776, 4776, 4968, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912},
1295 {104, 208, 328, 440, 568, 680, 808, 904, 1032, 1160, 1256, 1384, 1480, 1608, 1736, 1864, 1992, 2088, 2216, 2344, 2472, 2536, 2664, 2792, 2856, 2984, 3112, 3240, 3368, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4392, 4584, 4776, 4776, 4968, 4968, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6712, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11448, 11832, 11832, 11832, 11832, 12576, 12576, 12576, 12576, 12960, 12960},
1296 {120, 256, 408, 552, 696, 840, 1000, 1128, 1288, 1416, 1544, 1736, 1864, 1992, 2152, 2280, 2408, 2600, 2728, 2856, 2984, 3112, 3240, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4584, 4776, 4968, 4968, 5160, 5352, 5544, 5544, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 11832, 11832, 12576, 12576, 12576, 12960, 12960, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840},
1297 {144, 328, 504, 680, 872, 1032, 1224, 1384, 1544, 1736, 1928, 2088, 2280, 2472, 2664, 2792, 2984, 3112, 3368, 3496, 3752, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8504, 8760, 8760, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 11832, 12576, 12576, 12576, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19080},
1298 {176, 392, 600, 808, 1032, 1224, 1480, 1672, 1864, 2088, 2280, 2472, 2728, 2984, 3112, 3368, 3496, 3752, 4008, 4136, 4392, 4584, 4776, 4968, 5160, 5352, 5736, 5992, 5992, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7736, 7992, 8248, 8248, 8504, 8760, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 11832, 11832, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920},
1299 {224, 472, 712, 968, 1224, 1480, 1672, 1928, 2216, 2472, 2664, 2984, 3240, 3368, 3624, 3880, 4136, 4392, 4584, 4968, 5160, 5352, 5736, 5992, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 11832, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 25456, 25456, 27376, 27376},
1300 {256, 536, 808, 1096, 1384, 1672, 1928, 2216, 2536, 2792, 3112, 3368, 3624, 3880, 4264, 4584, 4776, 4968, 5352, 5544, 5992, 6200, 6456, 6712, 6968, 7224, 7480, 7736, 7992, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576},
1301 {296, 616, 936, 1256, 1544, 1864, 2216, 2536, 2856, 3112, 3496, 3752, 4136, 4392, 4776, 5160, 5352, 5736, 5992, 6200, 6712, 6968, 7224, 7480, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160},
1302 {328, 680, 1032, 1384, 1736, 2088, 2472, 2792, 3112, 3496, 3880, 4264, 4584, 4968, 5352, 5736, 5992, 6200, 6712, 6968, 7480, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 16992, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 21384, 21384, 24264, 24264, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 37888},
1303 {376, 776, 1192, 1608, 2024, 2408, 2792, 3240, 3624, 4008, 4392, 4776, 5352, 5736, 5992, 6456, 6968, 7224, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816},
1304 {440, 904, 1352, 1800, 2280, 2728, 3240, 3624, 4136, 4584, 4968, 5544, 5992, 6456, 6712, 7224, 7736, 8248, 8760, 9144, 9528, 9912, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15264, 15840, 16416, 16992, 17568, 17568, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024},
1305 {488, 1000, 1544, 2024, 2536, 3112, 3624, 4136, 4584, 5160, 5736, 6200, 6712, 7224, 7736, 8248, 8760, 9144, 9912, 10296, 10680, 11448, 11832, 12216, 12960, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336},
1306 {552, 1128, 1736, 2280, 2856, 3496, 4008, 4584, 5160, 5736, 6200, 6968, 7480, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22152, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776},
1307 {600, 1224, 1800, 2472, 3112, 3624, 4264, 4968, 5544, 6200, 6712, 7224, 7992, 8504, 9144, 9912, 10296, 11064, 11832, 12216, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 23688, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808},
1308 {632, 1288, 1928, 2600, 3240, 3880, 4584, 5160, 5992, 6456, 7224, 7736, 8504, 9144, 9912, 10296, 11064, 11832, 12216, 12960, 13536, 14112, 14688, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 24496, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 68808, 71112, 71112, 71112, 71112},
1309 {696, 1416, 2152, 2856, 3624, 4392, 5160, 5736, 6456, 7224, 7992, 8760, 9528, 10296, 10680, 11448, 12216, 12960, 13536, 14688, 15264, 15840, 16416, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 26416, 26416, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 71112, 73712, 73712, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 78704, 78704},
1310 {776, 1544, 2344, 3112, 4008, 4776, 5544, 6200, 7224, 7992, 8760, 9528, 10296, 11064, 11832, 12576, 13536, 14112, 15264, 15840, 16416, 17568, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 26416, 27376, 27376, 28336, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 81176, 84760, 84760, 84760, 84760, 87936, 87936},
1311 {840, 1736, 2600, 3496, 4264, 5160, 5992, 6968, 7736, 8504, 9528, 10296, 11064, 12216, 12960, 13536, 14688, 15264, 16416, 16992, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 24496, 25456, 25456, 26416, 27376, 28336, 29296, 30576, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 36696, 37888, 39232, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800},
1312 {904, 1864, 2792, 3752, 4584, 5544, 6456, 7480, 8248, 9144, 10296, 11064, 12216, 12960, 14112, 14688, 15840, 16992, 17568, 18336, 19848, 20616, 21384, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 29296, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 36696, 37888, 39232, 40576, 40576, 42368, 42368, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 97896, 101840, 101840, 101840},
1313 {1000, 1992, 2984, 4008, 4968, 5992, 6968, 7992, 9144, 9912, 11064, 12216, 12960, 14112, 15264, 15840, 16992, 18336, 19080, 19848, 21384, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 30576, 31704, 31704, 32856, 34008, 35160, 36696, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 101840, 101840, 101840, 101840, 105528, 105528, 105528, 105528, 110136, 110136, 110136},
1314 {1064, 2152, 3240, 4264, 5352, 6456, 7480, 8504, 9528, 10680, 11832, 12960, 14112, 15264, 16416, 16992, 18336, 19080, 20616, 21384, 22920, 23688, 24496, 25456, 27376, 28336, 29296, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 101840, 101840, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816},
1315 {1128, 2280, 3496, 4584, 5736, 6968, 7992, 9144, 10296, 11448, 12576, 13536, 14688, 15840, 16992, 18336, 19848, 20616, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 45352, 45352, 46888, 48936, 48936, 51024, 51024, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 68808, 71112, 71112, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840,101840,101840,101840,105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496},
1316 {1192, 2408, 3624, 4968, 5992, 7224, 8504, 9912, 11064, 12216, 13536, 14688, 15840, 16992, 18336, 19848, 20616, 22152, 22920, 24496, 25456, 26416, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 42368, 42368, 43816, 45352, 46888, 46888, 48936, 51024, 51024, 52752, 52752, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 101840, 105528, 105528, 105528, 105528, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496, 128496, 128496, 128496, 133208, 133208, 133208, 133208},
1317 {1256, 2536, 3752, 5160, 6200, 7480, 8760, 10296, 11448, 12576, 14112, 15264, 16416, 17568, 19080, 20616, 21384, 22920, 24496, 25456, 26416, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 42368, 43816, 43816, 45352, 46888, 48936, 48936, 51024, 52752, 52752, 55056, 55056, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 71112, 71112, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040,115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496, 128496, 128496, 128496, 133208, 133208, 133208, 133208, 137792, 137792, 137792, 142248},
1318 {1480, 2984, 4392, 5992, 7480, 8760, 10296, 11832, 13536, 14688, 16416, 17568, 19080, 20616, 22152, 23688, 25456, 26416, 28336, 29296, 30576, 32856, 34008, 35160, 36696, 37888, 40576, 40576, 42368, 43816, 45352, 46888, 48936, 51024, 52752, 52752, 55056, 55056, 57336, 59256, 59256, 61664, 63776, 63776, 66592, 68808, 68808, 71112, 73712, 75376, 75376, 75376, 75376, 75376, 75376, 81176, 84760, 84760, 87936, 87936, 90816, 90816, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 128496, 128496, 128496, 133208, 133208, 133208, 137792, 137792, 137792, 142248, 142248, 142248, 146856, 146856,149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776}
1321 RgSchUlIMcsTbl rgUlIMcsTbl = {
1322 {2, 0}, {2, 1}, {2, 2}, {2, 3}, {2, 4}, {2, 5},
1323 {2, 6}, {2, 7}, {2, 8}, {2, 9}, {2, 10},
1324 {4, 10}, {4, 11}, {4, 12}, {4, 13}, {4, 14},
1325 {4, 15}, {4, 16}, {4, 17}, {4, 18}, {4, 19},
1326 {6, 19}, {6, 20}, {6, 21}, {6, 22}, {6, 23},
1327 {6, 24}, {6, 25}, {6, 26}
1329 RgSchUeCatTbl rgUeCatTbl = {
1330 /*Column1:Maximum number of bits of a UL-SCH
1331 transport block transmitted within a TTI
1333 Column2:Maximum number of bits of a DL-SCH
1334 transport block received within a TTI
1336 Column3:Total number of soft channel bits
1338 Column4:Support for 64QAM in UL
1340 Column5:Maximum number of DL-SCH transport
1341 block bits received within a TTI
1343 Column6:Maximum number of supported layers for
1344 spatial multiplexing in DL
1346 {5160, {10296,0}, 250368, FALSE, 10296, 1},
1347 {25456, {51024,0}, 1237248, FALSE, 51024, 2},
1348 {51024, {75376,0}, 1237248, FALSE, 102048, 2},
1349 {51024, {75376,0}, 1827072, FALSE, 150752, 2},
1350 {75376, {149776,0}, 3667200, TRUE, 299552, 4},
1351 {51024, {75376,149776}, 3654144, FALSE, 301504, 4},
1352 {51024, {75376,149776}, 3654144, FALSE, 301504, 4},
1353 {149776,{299856,0}, 35982720,TRUE, 2998560, 8}
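/* The rows above are indexed by UE category (row 0 = category 1, ..., row 7 =
 * category 8). A minimal sketch of how such a table could be consulted is
 * given below; it is illustrative only and not part of the build. The struct
 * layout, field names and helper name are assumptions for the example and do
 * not reproduce the actual RgSchUeCatTbl definition. */
#if 0
typedef struct
{
   unsigned int maxUlBitsPerTti;   /* Column 1: max UL-SCH TB bits per TTI  */
   unsigned int maxDlTbBits[2];    /* Column 2: max DL-SCH TB bits (per TB) */
   unsigned int totalSoftChBits;   /* Column 3: total soft channel bits     */
   unsigned int ul64QamSup;        /* Column 4: 64QAM support in UL         */
   unsigned int maxDlBitsPerTti;   /* Column 5: max DL-SCH bits per TTI     */
   unsigned int maxDlLayers;       /* Column 6: max DL spatial mux layers   */
} ExampleUeCatRow;

/* Clip a requested DL allocation (in bits) to the per-TTI limit of the
 * UE's category. ueCat is 1-based (UE category 1..8). */
static unsigned int exampleCapDlBits(const ExampleUeCatRow *tbl,
                                     unsigned int ueCat,
                                     unsigned int reqBits)
{
   unsigned int lim = tbl[ueCat - 1].maxDlBitsPerTti;
   return (reqBits > lim) ? lim : reqBits;
}
#endif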
1356 /* [ccpu00138532]-ADD-The table below stores the minimum HARQ RTT time
1357 in Downlink for TDD and FDD. Indices 0 to 6 map to TDD UL-DL configs 0-6.
1358 Index 7 maps to FDD */
1359 U8 rgSchCmnHarqRtt[8] = {4,7,10,9,12,15,6,8};
1360 /* Number of CFI Switchover Indexes is equal to 7 TDD indexes + 1 FDD index */
1361 U8 rgSchCfiSwitchOvrWinLen[] = {7, 4, 2, 3, 2, 1, 6, 8};
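/* A minimal sketch (illustrative only, not part of the build) of how the
 * rgSchCmnHarqRtt table above is indexed: TDD UL-DL configurations 0..6 use
 * indices 0..6 and FDD uses index 7. The helper name and its isTdd/ulDlCfg
 * parameters are assumptions for the example, not the scheduler's actual API. */
#if 0
static U8 exampleMinDlHarqRtt(Bool isTdd, U8 ulDlCfg)
{
   return (isTdd) ? rgSchCmnHarqRtt[ulDlCfg] : rgSchCmnHarqRtt[7];
}
#endif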
1363 /* EffTbl is calculated for single layer and two layers.
1364 * CqiToTbs is calculated for single layer and two layers */
1365 RgSchCmnTbSzEff rgSchCmnNorCfi1Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi2Eff[RGSCH_MAX_NUM_LYR_PERCW];
1366 RgSchCmnTbSzEff rgSchCmnNorCfi3Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi4Eff[RGSCH_MAX_NUM_LYR_PERCW];
1367 /* New variable to store UL efficiency values for normal and extended CP */
1368 RgSchCmnTbSzEff rgSchCmnNorUlEff[1],rgSchCmnExtUlEff[1];
1369 RgSchCmnCqiToTbs rgSchCmnNorCfi1CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi2CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1370 RgSchCmnCqiToTbs rgSchCmnNorCfi3CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi4CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1371 RgSchCmnCqiToTbs *rgSchCmnCqiToTbs[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_CFI];
1372 RgSchCmnTbSzEff rgSchCmnExtCfi1Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi2Eff[RGSCH_MAX_NUM_LYR_PERCW];
1373 RgSchCmnTbSzEff rgSchCmnExtCfi3Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi4Eff[RGSCH_MAX_NUM_LYR_PERCW];
1374 RgSchCmnCqiToTbs rgSchCmnExtCfi1CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi2CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1375 RgSchCmnCqiToTbs rgSchCmnExtCfi3CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi4CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1376 /* Include CRS REs while calculating Efficiency */
1377 RgSchCmnTbSzEff *rgSchCmnEffTbl[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_ANT_CONF][RG_SCH_CMN_MAX_CFI];
1378 RgSchCmnTbSzEff *rgSchCmnUlEffTbl[RG_SCH_CMN_MAX_CP];
1380 RgSchRaPrmblToRaFrmTbl rgRaPrmblToRaFrmTbl = {1, 2, 2, 3, 1};
1382 /* Added matrix 'rgRaPrmblToRaFrmTbl' for computation of RA sub-frames from RA preamble */
1383 RgSchRaPrmblToRaFrmTbl rgRaPrmblToRaFrmTbl = {1, 2, 2, 3};
1386 EXTERN RgUlSchdInits rgSchUlSchdInits;
1387 EXTERN RgDlSchdInits rgSchDlSchdInits;
1388 EXTERN RgDlfsSchdInits rgSchDlfsSchdInits;
1390 EXTERN RgEmtcUlSchdInits rgSchEmtcUlSchdInits;
1391 EXTERN RgEmtcDlSchdInits rgSchEmtcDlSchdInits;
1395 PRIVATE S16 rgSCHCmnUeIdleExdThrsld ARGS((
1399 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe ARGS((
1403 PRIVATE Void rgSCHCmnDelDedPreamble ARGS((
1407 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe ARGS((
1410 CmLteTimingInfo timingInfo
1412 PRIVATE Void rgSCHCmnDelRachInfo ARGS((
1416 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe ARGS((
1422 PRIVATE Void rgSCHCmnHdlHoPo ARGS((
1424 CmLListCp *raRspLst,
1425 RgSchRaReqInfo *raReq
1427 PRIVATE Void rgSCHCmnAllocPoHoGrnt ARGS((
1429 CmLListCp *raRspLst,
1431 RgSchRaReqInfo *raReq
1433 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf ARGS((
1440 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ ARGS((
1444 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ ARGS((
1448 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx ARGS((
1451 PRIVATE Void rgSCHCmnUpdRachParam ARGS((
1454 PRIVATE S16 rgSCHCmnAllocPOParam ARGS((
1462 PRIVATE Void rgSCHCmnGenPdcchOrder ARGS((
1466 PRIVATE Void rgSCHCmnCfgRachDedPrm ARGS((
1471 PRIVATE Void rgSCHCmnHdlUlInactUes ARGS((
1474 PRIVATE Void rgSCHCmnHdlDlInactUes ARGS((
1477 PRIVATE Void rgSCHCmnUlInit ARGS((Void
1479 PRIVATE Void rgSCHCmnDlInit ARGS((Void
1481 PRIVATE Void rgSCHCmnInitDlRbAllocInfo ARGS((
1482 RgSchCmnDlRbAllocInfo *allocInfo
1484 PRIVATE Void rgSCHCmnUpdUlCompEffBsr ARGS((
1488 PRIVATE Void rgSCHCmnUlSetAllUnSched ARGS((
1489 RgSchCmnUlRbAllocInfo *allocInfo
1491 PRIVATE Void rgSCHCmnUlUpdSf ARGS((
1493 RgSchCmnUlRbAllocInfo *allocInfo,
1496 PRIVATE Void rgSCHCmnUlHndlAllocRetx ARGS((
1498 RgSchCmnUlRbAllocInfo *allocInfo,
1503 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch ARGS((
1507 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch ARGS((
1511 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ ARGS((
1515 PRIVATE S16 rgSCHCmnTmrExpiry ARGS((
1516 PTR cb, /* Pointer to timer control block */
1517 S16 tmrEvnt /* Timer Event */
1519 PRIVATE S16 rgSCHCmnTmrProc ARGS((
1522 PRIVATE Void rgSCHCmnAddUeToRefreshQ ARGS((
1527 PRIVATE Void rgSCHCmnDlCcchRetx ARGS((
1529 RgSchCmnDlRbAllocInfo *allocInfo
1531 PRIVATE Void rgSCHCmnUpdUeMimoInfo ARGS((
1535 RgSchCmnCell *cellSchd
1537 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo ARGS((
1541 RgSchCmnUe *ueSchCmn,
1542 RgSchCmnCell *cellSchd,
1546 PRIVATE Void rgSCHCmnDlCcchSduRetx ARGS((
1548 RgSchCmnDlRbAllocInfo *allocInfo
1550 PRIVATE Void rgSCHCmnDlCcchSduTx ARGS((
1552 RgSchCmnDlRbAllocInfo *allocInfo
1554 PRIVATE S16 rgSCHCmnCcchSduAlloc ARGS((
1557 RgSchCmnDlRbAllocInfo *allocInfo
1559 PRIVATE S16 rgSCHCmnCcchSduDedAlloc ARGS((
1563 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc ARGS((
1569 PRIVATE Void rgSCHCmnInitVars ARGS((
1573 /*ccpu00117180 - DEL - Moved rgSCHCmnUpdVars to .x as its access is now PUBLIC */
1574 PRIVATE Void rgSCHCmnUlRbAllocForLst ARGS((
1580 CmLListCp *nonSchdLst,
1583 PRIVATE S16 rgSCHCmnUlRbAllocForUe ARGS((
1590 PRIVATE Void rgSCHCmnMsg3GrntReq ARGS((
1594 RgSchUlHqProcCb *hqProc,
1595 RgSchUlAlloc **ulAllocRef,
1598 PRIVATE Void rgSCHCmnDlCcchRarAlloc ARGS((
1601 PRIVATE Void rgSCHCmnDlCcchTx ARGS((
1603 RgSchCmnDlRbAllocInfo *allocInfo
1605 PRIVATE Void rgSCHCmnDlBcchPcch ARGS((
1607 RgSchCmnDlRbAllocInfo *allocInfo,
1608 RgInfSfAlloc *subfrmAlloc
1610 PUBLIC Bool rgSCHCmnChkInWin ARGS((
1611 CmLteTimingInfo frm,
1612 CmLteTimingInfo start,
1615 PUBLIC Bool rgSCHCmnChkPastWin ARGS((
1616 CmLteTimingInfo frm,
1619 PRIVATE Void rgSCHCmnClcAlloc ARGS((
1622 RgSchClcDlLcCb *lch,
1624 RgSchCmnDlRbAllocInfo *allocInfo
1627 PRIVATE Void rgSCHCmnClcRbAlloc ARGS((
1638 PRIVATE S16 rgSCHCmnMsg4Alloc ARGS((
1641 RgSchCmnDlRbAllocInfo *allocInfo
1643 PRIVATE S16 rgSCHCmnMsg4DedAlloc ARGS((
1647 PRIVATE Void rgSCHCmnDlRaRsp ARGS((
1649 RgSchCmnDlRbAllocInfo *allocInfo
1651 PRIVATE S16 rgSCHCmnRaRspAlloc ARGS((
1657 RgSchCmnDlRbAllocInfo *allocInfo
1659 PRIVATE Void rgSCHCmnUlUeDelAllocs ARGS((
1663 PRIVATE Void rgSCHCmnDlSetUeAllocLmt ARGS((
1668 PRIVATE S16 rgSCHCmnDlRgrCellCfg ARGS((
1673 PRIVATE Void rgSCHCmnUlAdapRetx ARGS((
1674 RgSchUlAlloc *alloc,
1675 RgSchUlHqProcCb *proc
1677 PRIVATE Void rgSCHCmnUlUpdAllocRetx ARGS((
1681 PRIVATE Void rgSCHCmnUlSfReTxAllocs ARGS((
1685 /* Fix: syed Adaptive Msg3 Retx crash. */
1687 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg ARGS
1691 RgrUeRecfg *ueRecfg,
1695 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg ARGS
1705 * DL RB allocation specific functions
1708 PRIVATE Void rgSCHCmnDlRbAlloc ARGS((
1710 RgSchCmnDlRbAllocInfo *allocInfo
1712 PRIVATE Void rgSCHCmnNonDlfsRbAlloc ARGS((
1714 RgSchCmnDlRbAllocInfo *allocInfo
1716 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc ARGS((
1718 RgSchDlRbAlloc *cmnAllocInfo));
1721 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj ARGS((
1723 RgSchDlRbAlloc *cmnAllocInfo,
1727 /* Added function to adjust TBSize*/
1728 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj ARGS((
1729 RgSchDlRbAlloc *allocInfo,
1730 U8 numOvrlapgPbchRb,
1736 /* Added function to find num of overlapping PBCH rb*/
1737 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs ARGS((
1740 RgSchDlRbAlloc *allocInfo,
1741 U8 *numOvrlapgPbchRb
1744 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl ARGS((
1747 RgSchDlRbAlloc *allocInfo
1751 PRIVATE Void rgSCHCmnFindCodeRate ARGS((
1754 RgSchDlRbAlloc *allocInfo,
1760 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc ARGS((
1762 RgSchCmnMsg4RbAlloc *msg4AllocInfo,
1765 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc ARGS((
1771 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc ARGS((
1778 PRIVATE U32 rgSCHCmnCalcRiv ARGS(( U8 bw,
1784 PRIVATE Void rgSCHCmnUpdHqAndDai ARGS((
1785 RgSchDlHqProcCb *hqP,
1787 RgSchDlHqTbCb *tbCb,
1790 PRIVATE S16 rgSCHCmnUlCalcAvailBw ARGS((
1792 RgrCellCfg *cellCfg,
1797 PRIVATE S16 rgSCHCmnDlKdashUlAscInit ARGS((
1800 PRIVATE S16 rgSCHCmnDlANFdbkInit ARGS((
1803 PRIVATE S16 rgSCHCmnDlNpValInit ARGS((
1806 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst ARGS((
1809 PRIVATE S16 rgSCHCmnDlCpyRachInfo ARGS((
1811 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES],
1814 PRIVATE S16 rgSCHCmnDlRachInfoInit ARGS((
1817 PRIVATE S16 rgSCHCmnDlPhichOffsetInit ARGS((
1822 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt ARGS
1828 PRIVATE RgSchCmnRank rgSCHCmnComputeRank ARGS
1835 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3 ARGS
1840 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3 ARGS
1845 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4 ARGS
1850 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4 ARGS
1855 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr ARGS
1862 /* comcodsepa : start */
1865 * @brief This function computes efficiency and stores in a table.
1869 * Function: rgSCHCmnCompEff
1870 * Purpose: This function computes the efficiency as the number of
1871 * bits per 1024 resource elements. The CFI table is also filled
1872 * with the same information so that comparisons remain valid
1874 * Invoked by: Scheduler
1876 * @param[in] U8 noPdcchSym
1877 * @param[in] U8 cpType
1878 * @param[in] U8 txAntIdx
1879 * @param[in] RgSchCmnTbSzEff* effTbl
1884 PRIVATE Void rgSCHCmnCompEff
1889 RgSchCmnTbSzEff *effTbl
1892 PRIVATE Void rgSCHCmnCompEff(noPdcchSym, cpType, txAntIdx, effTbl)
1896 RgSchCmnTbSzEff *effTbl;
1901 U8 resOfCrs; /* Effective REs occupied by CRS */
1904 TRC2(rgSCHCmnCompEff);
1908 case RG_SCH_CMN_NOR_CP:
1911 case RG_SCH_CMN_EXT_CP:
1915 /* Generate a log error. This case should never be executed */
1919 /* Depending on the Tx Antenna Index, deduct the
1920 * Resource elements for the CRS */
1924 resOfCrs = RG_SCH_CMN_EFF_CRS_ONE_ANT_PORT;
1927 resOfCrs = RG_SCH_CMN_EFF_CRS_TWO_ANT_PORT;
1930 resOfCrs = RG_SCH_CMN_EFF_CRS_FOUR_ANT_PORT;
1933 /* Generate a log error. This case should never be executed */
1936 noResPerRb = ((noSymPerRb - noPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - resOfCrs;
1937 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
1940 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
1942 /* This line computes the coding efficiency per 1024 REs */
1943 (*effTbl)[i] += (rgTbSzTbl[0][i][j] * 1024) / (noResPerRb * (j+1));
1945 (*effTbl)[i] /= RG_SCH_CMN_NUM_RBS;
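/* A simplified, self-contained illustration (not part of the build) of the
 * averaging above: for a given MCS row, the transport block size in bits is
 * scaled to 1024 REs for every possible RB count and the results are
 * averaged. Array sizes, names and the resPerRb value are placeholders for
 * the example, not the real rgTbSzTbl dimensions. */
#if 0
#define EX_NUM_RBS 4
static unsigned int exampleEffPer1024Res(const unsigned int tbSzBits[EX_NUM_RBS],
                                         unsigned int resPerRb)
{
   unsigned int j;
   unsigned int eff = 0;
   for (j = 0; j < EX_NUM_RBS; j++)
   {
      /* bits carried per 1024 REs when (j + 1) RBs are allocated */
      eff += (tbSzBits[j] * 1024) / (resPerRb * (j + 1));
   }
   return (eff / EX_NUM_RBS);   /* average across all RB counts */
}
#endif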
1950 * @brief This function computes efficiency and stores in a table.
1954 * Function: rgSCHCmnCompUlEff
1955 * Purpose: This function computes the efficiency as the number of
1956 * bits per 1024 resource elements. The CFI table is also filled
1957 * with the same information so that comparisons remain valid
1959 * Invoked by: Scheduler
1961 * @param[in] U8 noUlRsSym
1962 * @param[in] U8 cpType
1963 * @param[in] U8 txAntIdx
1964 * @param[in] RgSchCmnTbSzEff* effTbl
1969 PRIVATE Void rgSCHCmnCompUlEff
1973 RgSchCmnTbSzEff *effTbl
1976 PRIVATE Void rgSCHCmnCompUlEff(noUlRsSym, cpType, effTbl)
1979 RgSchCmnTbSzEff *effTbl;
1986 TRC2(rgSCHCmnCompUlEff);
1990 case RG_SCH_CMN_NOR_CP:
1993 case RG_SCH_CMN_EXT_CP:
1997 /* Generate a log error. This case should never be executed */
2001 noResPerRb = ((noSymPerRb - noUlRsSym) * RB_SCH_CMN_NUM_SCS_PER_RB);
2002 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
2005 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
2007 /* This line computes the coding efficiency per 1024 REs */
2008 (*effTbl)[i] += (rgTbSzTbl[0][i][j] * 1024) / (noResPerRb * (j+1));
2010 (*effTbl)[i] /= RG_SCH_CMN_NUM_RBS;
2016 * @brief This function computes efficiency for 2 layers and stores in a table.
2020 * Function: rgSCHCmn2LyrCompEff
2021 * Purpose: This function computes the efficiency as the number of
2022 * bits per 1024 resource elements. The CFI table is also filled
2023 * with the same information so that comparisons remain valid
2025 * Invoked by: Scheduler
2027 * @param[in] U8 noPdcchSym
2028 * @param[in] U8 cpType
2029 * @param[in] U8 txAntIdx
2030 * @param[in] RgSchCmnTbSzEff* effTbl2Lyr
2035 PRIVATE Void rgSCHCmn2LyrCompEff
2040 RgSchCmnTbSzEff *effTbl2Lyr
2043 PRIVATE Void rgSCHCmn2LyrCompEff(noPdcchSym, cpType, txAntIdx, effTbl2Lyr)
2047 RgSchCmnTbSzEff *effTbl2Lyr;
2052 U8 resOfCrs; /* Effective REs occupied by CRS */
2055 TRC2(rgSCHCmn2LyrCompEff);
2059 case RG_SCH_CMN_NOR_CP:
2062 case RG_SCH_CMN_EXT_CP:
2066 /* Generate a log error. This case should never be executed */
2070 /* Depending on the Tx Antenna Index, deduct the
2071 * Resource elements for the CRS */
2075 resOfCrs = RG_SCH_CMN_EFF_CRS_ONE_ANT_PORT;
2078 resOfCrs = RG_SCH_CMN_EFF_CRS_TWO_ANT_PORT;
2081 resOfCrs = RG_SCH_CMN_EFF_CRS_FOUR_ANT_PORT;
2084 /* Generate a log error. This case should never be executed */
2088 noResPerRb = ((noSymPerRb - noPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - resOfCrs;
2089 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
2091 (*effTbl2Lyr)[i] = 0;
2092 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
2094 /* This line computes the coding efficiency per 1024 REs */
2095 (*effTbl2Lyr)[i] += (rgTbSzTbl[1][i][j] * 1024) / (noResPerRb * (j+1));
2097 (*effTbl2Lyr)[i] /= RG_SCH_CMN_NUM_RBS;
2104 * @brief This function initializes the rgSchCmnDciFrmtSizes table.
2108 * Function: rgSCHCmnGetDciFrmtSizes
2109 * Purpose: This function determines the sizes of all
2110 * the available DCI Formats. The order of
2111 * bits addition for each format is in accordance
2113 * Invoked by: rgSCHCmnRgrCellCfg
2119 PRIVATE Void rgSCHCmnGetDciFrmtSizes
2124 PRIVATE Void rgSCHCmnGetDciFrmtSizes(cell)
2129 TRC2(rgSCHCmnGetDciFrmtSizes);
2131 /* DCI Format 0 size determination */
2132 rgSchCmnDciFrmtSizes[0] = 1 +
2134 rgSCHUtlLog32bitNbase2((cell->bwCfg.ulTotalBw * \
2135 (cell->bwCfg.ulTotalBw + 1))/2) +
2145 /* DCI Format 1 size determination */
2146 rgSchCmnDciFrmtSizes[1] = 1 +
2147 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2152 4 + 2 + /* HqProc Id and DAI */
2158 /* DCI Format 1A size determination */
2159 rgSchCmnDciFrmtSizes[2] = 1 + /* Flag for format0/format1a differentiation */
2160 1 + /* Localized/distributed VRB assignment flag */
2163 3 + /* Harq process Id */
2165 4 + /* Harq process Id */
2166 2 + /* UL Index or DAI */
2168 1 + /* New Data Indicator */
2171 1 + rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2172 (cell->bwCfg.dlTotalBw + 1))/2);
2173 /* Resource block assignment ceil[log2(bw(bw+1)/2)] : \
2174 Since VRB is local */
2176 /* DCI Format 1B size determination */
2177 rgSchCmnDciFrmtSizes[3] = 1 +
2178 rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2179 (cell->bwCfg.dlTotalBw + 1))/2) +
2189 ((cell->numTxAntPorts == 4)? 4:2) +
2192 /* DCI Format 1C size determination */
2193 /* Approximation: NDLVrbGap1 ~= Nprb for DL */
2194 rgSchCmnDciFrmtSizes[4] = (cell->bwCfg.dlTotalBw < 50)? 0:1 +
2195 (cell->bwCfg.dlTotalBw < 50)?
2196 (rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw/2 * \
2197 (cell->bwCfg.dlTotalBw/2 + 1))/2)) :
2198 (rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw/4 * \
2199 (cell->bwCfg.dlTotalBw/4 + 1))/2)) +
2202 /* DCI Format 1D size determination */
2203 rgSchCmnDciFrmtSizes[5] = 1 +
2204 rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2205 (cell->bwCfg.dlTotalBw + 1))/2) +
2214 ((cell->numTxAntPorts == 4)? 4:2) +
2217 /* DCI Format 2 size determination */
2218 rgSchCmnDciFrmtSizes[6] = ((cell->bwCfg.dlTotalBw < 10)?0:1) +
2219 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2227 ((cell->numTxAntPorts == 4)? 6:3);
2229 /* DCI Format 2A size determination */
2230 rgSchCmnDciFrmtSizes[7] = ((cell->bwCfg.dlTotalBw < 10)?0:1) +
2231 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2239 ((cell->numTxAntPorts == 4)? 2:0);
2241 /* DCI Format 3 size determination */
2242 rgSchCmnDciFrmtSizes[8] = rgSchCmnDciFrmtSizes[0];
2244 /* DCI Format 3A size determination */
2245 rgSchCmnDciFrmtSizes[9] = rgSchCmnDciFrmtSizes[0];
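/* A minimal sketch (not part of the build) of the resource-assignment field
 * width that appears repeatedly above: ceil(log2(bw * (bw + 1) / 2)) bits,
 * where bw is the bandwidth in RBs. The helper below is an illustrative
 * stand-in for rgSCHUtlLog32bitNbase2(), not its actual implementation. */
#if 0
static unsigned int exampleRivFieldWidth(unsigned int bwRb)
{
   unsigned int vals = (bwRb * (bwRb + 1)) / 2;  /* number of RIV values */
   unsigned int bits = 0;
   while ((1U << bits) < vals)
   {
      bits++;
   }
   return bits;   /* e.g. bwRb = 100 -> 5050 values -> 13 bits */
}
#endif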
2252 * @brief This function initializes the cmnCell->dciAggrLvl table.
2256 * Function: rgSCHCmnGetCqiDciFrmt2AggrLvl
2257 * Purpose: This function determines the Aggregation level
2258 * for each CQI level against each DCI format.
2259 * Invoked by: rgSCHCmnRgrCellCfg
2265 PRIVATE Void rgSCHCmnGetCqiDciFrmt2AggrLvl
2270 PRIVATE Void rgSCHCmnGetCqiDciFrmt2AggrLvl(cell)
2274 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2278 TRC2(rgSCHCmnGetCqiDciFrmt2AggrLvl);
2280 for (i = 0; i < RG_SCH_CMN_MAX_CQI; i++)
2282 for (j = 0; j < 10; j++) /* one entry per DCI format size in rgSchCmnDciFrmtSizes[] */
2284 U32 pdcchBits; /* Actual number of phy bits needed for a given DCI Format
2285 * for a given CQI Level */
2286 pdcchBits = (rgSchCmnDciFrmtSizes[j] * 1024)/rgSchCmnCqiPdcchEff[i];
2288 if (pdcchBits < 192)
2290 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL2;
2293 if (pdcchBits < 384)
2295 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL4;
2298 if (pdcchBits < 768)
2300 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL8;
2303 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL16;
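/* A compact restatement (illustrative only, not part of the build) of the
 * threshold mapping above: the PDCCH bits needed for a DCI format at a given
 * CQI decide the aggregation level. The helper name and plain integer return
 * values are assumptions for the example; the scheduler itself stores
 * CM_LTE_AGGR_LVLx enums in cellSch->dciAggrLvl[][]. */
#if 0
static unsigned int exampleAggrLvlFromPdcchBits(unsigned int pdcchBits)
{
   if (pdcchBits < 192)
   {
      return 2;    /* CM_LTE_AGGR_LVL2  */
   }
   if (pdcchBits < 384)
   {
      return 4;    /* CM_LTE_AGGR_LVL4  */
   }
   if (pdcchBits < 768)
   {
      return 8;    /* CM_LTE_AGGR_LVL8  */
   }
   return 16;      /* CM_LTE_AGGR_LVL16 */
}
#endif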
2310 * @brief This function initializes all the data for the scheduler.
2314 * Function: rgSCHCmnDlInit
2315 * Purpose: This function initializes the following information:
2316 * 1. Efficiency table
2317 * 2. CQI to table index - It is one row for up to 3 RBs
2318 * and another row for greater than 3 RBs.
2319 * Currently extended prefix is compiled out.
2320 * Invoked by: MAC initialization code, possibly ActvInit
2326 PRIVATE Void rgSCHCmnDlInit
2330 PRIVATE Void rgSCHCmnDlInit()
2337 RgSchCmnTbSzEff *effTbl;
2338 RgSchCmnCqiToTbs *tbsTbl;
2340 TRC2(rgSCHCmnDlInit);
2342 /* 0 corresponds to Single layer case, 1 corresponds to 2 layers case*/
2343 /* Init Efficiency table for normal cyclic prefix */
2344 /*Initialize Efficiency table for Layer Index 0 */
2345 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2346 /*Initialize Efficiency table for each of the CFI indices. The
2347 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2348 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][0] = &rgSchCmnNorCfi1Eff[0];
2349 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][1] = &rgSchCmnNorCfi2Eff[0];
2350 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][2] = &rgSchCmnNorCfi3Eff[0];
2351 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][3] = &rgSchCmnNorCfi4Eff[0];
2352 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2353 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][0] = &rgSchCmnNorCfi1Eff[0];
2354 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][1] = &rgSchCmnNorCfi2Eff[0];
2355 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][2] = &rgSchCmnNorCfi3Eff[0];
2356 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][3] = &rgSchCmnNorCfi4Eff[0];
2357 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2358 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][0] = &rgSchCmnNorCfi1Eff[0];
2359 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][1] = &rgSchCmnNorCfi2Eff[0];
2360 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][2] = &rgSchCmnNorCfi3Eff[0];
2361 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][3] = &rgSchCmnNorCfi4Eff[0];
2363 /*Initialize CQI to TBS table for Layer Index 0 for Normal CP */
2364 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][0] = &rgSchCmnNorCfi1CqiToTbs[0];
2365 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][1] = &rgSchCmnNorCfi2CqiToTbs[0];
2366 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][2] = &rgSchCmnNorCfi3CqiToTbs[0];
2367 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][3] = &rgSchCmnNorCfi4CqiToTbs[0];
2369 /*Initialize Efficiency table for Layer Index 1 */
2370 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2371 /*Initialize Efficiency table for each of the CFI indices. The
2372 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2373 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][0] = &rgSchCmnNorCfi1Eff[1];
2374 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][1] = &rgSchCmnNorCfi2Eff[1];
2375 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][2] = &rgSchCmnNorCfi3Eff[1];
2376 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][3] = &rgSchCmnNorCfi4Eff[1];
2377 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2378 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][0] = &rgSchCmnNorCfi1Eff[1];
2379 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][1] = &rgSchCmnNorCfi2Eff[1];
2380 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][2] = &rgSchCmnNorCfi3Eff[1];
2381 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][3] = &rgSchCmnNorCfi4Eff[1];
2382 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2383 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][0] = &rgSchCmnNorCfi1Eff[1];
2384 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][1] = &rgSchCmnNorCfi2Eff[1];
2385 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][2] = &rgSchCmnNorCfi3Eff[1];
2386 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][3] = &rgSchCmnNorCfi4Eff[1];
2388 /*Initialize CQI to TBS table for Layer Index 1 for Normal CP */
2389 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][0] = &rgSchCmnNorCfi1CqiToTbs[1];
2390 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][1] = &rgSchCmnNorCfi2CqiToTbs[1];
2391 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][2] = &rgSchCmnNorCfi3CqiToTbs[1];
2392 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][3] = &rgSchCmnNorCfi4CqiToTbs[1];
2394 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2396 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2398 /* Efficiency table calculation for 1-layer and 2-layer cases for normal CP */
2399 rgSCHCmnCompEff((U8)(i + 1), RG_SCH_CMN_NOR_CP, idx,\
2400 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][idx][i]);
2401 rgSCHCmn2LyrCompEff((U8)(i + 1), RG_SCH_CMN_NOR_CP, idx, \
2402 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][idx][i]);
2406 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2408 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2410 effTbl = rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][idx][i];
2411 tbsTbl = rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][i];
2412 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2413 (j >= 0) && (k > 0); --j)
2415 /* ADD CQI to MCS mapping correction
2416 * single dimensional array is replaced by 2 dimensions for different CFI*/
2417 if ((*effTbl)[j] <= rgSchCmnCqiPdschEff[i][k])
2419 (*tbsTbl)[k--] = (U8)j;
2426 /* effTbl,tbsTbl calculation in case of 2 layers for normal CP */
2427 effTbl = rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][idx][i];
2428 tbsTbl = rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][i];
2429 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2430 (j >= 0) && (k > 0); --j)
2432 /* ADD CQI to MCS mapping correction
2433 * single dimensional array is replaced by 2 dimensions for different CFI*/
2434 if ((*effTbl)[j] <= rgSchCmn2LyrCqiPdschEff[i][k])
2436 (*tbsTbl)[k--] = (U8)j;
2446 /* Efficiency Table for Extended CP */
2447 /*Initialize Efficiency table for Layer Index 0 */
2448 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2449 /*Initialize Efficiency table for each of the CFI indices. The
2450 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2451 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][0] = &rgSchCmnExtCfi1Eff[0];
2452 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][1] = &rgSchCmnExtCfi2Eff[0];
2453 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][2] = &rgSchCmnExtCfi3Eff[0];
2454 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][3] = &rgSchCmnExtCfi4Eff[0];
2455 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2456 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][0] = &rgSchCmnExtCfi1Eff[0];
2457 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][1] = &rgSchCmnExtCfi2Eff[0];
2458 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][2] = &rgSchCmnExtCfi3Eff[0];
2459 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][3] = &rgSchCmnExtCfi4Eff[0];
2460 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2461 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][0] = &rgSchCmnExtCfi1Eff[0];
2462 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][1] = &rgSchCmnExtCfi2Eff[0];
2463 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][2] = &rgSchCmnExtCfi3Eff[0];
2464 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][3] = &rgSchCmnExtCfi4Eff[0];
2466 /*Initialize CQI to TBS table for Layer Index 0 for Extended CP */
2467 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][0] = &rgSchCmnExtCfi1CqiToTbs[0];
2468 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][1] = &rgSchCmnExtCfi2CqiToTbs[0];
2469 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][2] = &rgSchCmnExtCfi3CqiToTbs[0];
2470 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][3] = &rgSchCmnExtCfi4CqiToTbs[0];
2472 /*Initialize Efficiency table for Layer Index 1 */
2473 /*Initialize Efficiency table for each of the CFI indices. The
2474 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2475 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2476 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][0] = &rgSchCmnExtCfi1Eff[1];
2477 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][1] = &rgSchCmnExtCfi2Eff[1];
2478 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][2] = &rgSchCmnExtCfi3Eff[1];
2479 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][3] = &rgSchCmnExtCfi4Eff[1];
2480 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2481 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][0] = &rgSchCmnExtCfi1Eff[1];
2482 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][1] = &rgSchCmnExtCfi2Eff[1];
2483 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][2] = &rgSchCmnExtCfi3Eff[1];
2484 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][3] = &rgSchCmnExtCfi4Eff[1];
2485 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2486 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][0] = &rgSchCmnExtCfi1Eff[1];
2487 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][1] = &rgSchCmnExtCfi2Eff[1];
2488 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][2] = &rgSchCmnExtCfi3Eff[1];
2489 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][3] = &rgSchCmnExtCfi4Eff[1];
2491 /*Initialize CQI to TBS table for Layer Index 1 for Extended CP */
2492 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][0] = &rgSchCmnExtCfi1CqiToTbs[1];
2493 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][1] = &rgSchCmnExtCfi2CqiToTbs[1];
2494 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][2] = &rgSchCmnExtCfi3CqiToTbs[1];
2495 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][3] = &rgSchCmnExtCfi4CqiToTbs[1];
2496 /* Activate this code when extended cp is supported */
2497 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2499 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2501 /* Efficiency table calculation for 1-layer and 2-layer cases for extended CP */
2502 rgSCHCmnCompEff( (U8)(i + 1 ), (U8)RG_SCH_CMN_EXT_CP, idx,\
2503 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][idx][i]);
2504 rgSCHCmn2LyrCompEff((U8)(i + 1), (U8) RG_SCH_CMN_EXT_CP,idx, \
2505 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][idx][i]);
2509 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2511 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2513 effTbl = rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][idx][i];
2514 tbsTbl = rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][i];
2515 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2516 (j >= 0) && (k > 0); --j)
2518 /* ADD CQI to MCS mapping correction
2519 * single dimensional array is replaced by 2 dimensions for different CFI*/
2520 if ((*effTbl)[j] <= rgSchCmnCqiPdschEff[i][k])
2522 (*tbsTbl)[k--] = (U8)j;
2529 /* effTbl,tbsTbl calculation in case of 2 layers for extended CP */
2530 effTbl = rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][idx][i];
2531 tbsTbl = rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][i];
2532 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2533 (j >= 0) && (k > 0); --j)
2535 /* ADD CQI to MCS mapping correction
2536 * single dimensional array is replaced by 2 dimensions for different CFI*/
2537 if ((*effTbl)[j] <= rgSchCmn2LyrCqiPdschEff[i][k])
2539 (*tbsTbl)[k--] = (U8)j;
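/* A self-contained sketch (not part of the build) of the descending scan used
 * in the loops above to build a CQI-to-TBS map: walking the TBS indices from
 * highest to lowest, each CQI (highest first) is assigned the largest TBS
 * index whose efficiency does not exceed the efficiency that CQI can sustain.
 * Array sizes and names are assumptions chosen for the example. */
#if 0
#define EX_NUM_TBS 27
#define EX_NUM_CQI 16
static void exampleBuildCqiToTbs(const unsigned int effPerTbs[EX_NUM_TBS],
                                 const unsigned int effPerCqi[EX_NUM_CQI],
                                 unsigned char cqiToTbs[EX_NUM_CQI])
{
   int j;
   int k;
   for (j = EX_NUM_TBS - 1, k = EX_NUM_CQI - 1; (j >= 0) && (k > 0); --j)
   {
      if (effPerTbs[j] <= effPerCqi[k])
      {
         /* highest TBS index this CQI can support */
         cqiToTbs[k--] = (unsigned char)j;
      }
   }
}
#endif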
2552 * @brief This function initializes all the data for the scheduler.
2556 * Function: rgSCHCmnUlInit
2557 * Purpose: This function initializes the following information:
2558 * 1. Efficiency table
2559 * 2. CQI to table index - It is one row for up to 3 RBs
2560 * and another row for greater than 3 RBs.
2561 * Currently extended prefix is compiled out.
2562 * Invoked by: MAC initialization code, possibly ActvInit
2568 PRIVATE Void rgSCHCmnUlInit
2572 PRIVATE Void rgSCHCmnUlInit()
2575 U8 *mapTbl = &rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_NOR_CP][0];
2576 RgSchCmnTbSzEff *effTbl = &rgSchCmnNorUlEff[0];
2577 CONSTANT RgSchCmnUlCqiInfo *cqiTbl = &rgSchCmnUlCqiTbl[0];
2580 TRC2(rgSCHCmnUlInit);
2582 /* Initializing new variable added for UL eff */
2583 rgSchCmnUlEffTbl[RG_SCH_CMN_NOR_CP] = &rgSchCmnNorUlEff[0];
2584 /* Three symbols are excluded from the efficiency table computation
2585 * because 2 symbols are used for DMRS (1 in each slot) and 1 symbol
2586 * is used for SRS */
2587 rgSCHCmnCompUlEff(RGSCH_UL_SYM_DMRS_SRS,RG_SCH_CMN_NOR_CP,rgSchCmnUlEffTbl[RG_SCH_CMN_NOR_CP]);
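/* Worked example (assuming 14 OFDM symbols per subframe for normal CP and
 * 12 subcarriers per RB): excluding the 3 reference-signal symbols leaves
 * (14 - 3) * 12 = 132 REs per RB for PUSCH data in the efficiency
 * computation above. */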
2589 for (i = RGSCH_NUM_ITBS - 1, j = RG_SCH_CMN_UL_NUM_CQI - 1;
2590 i >= 0 && j > 0; --i)
2592 if ((*effTbl)[i] <= cqiTbl[j].eff)
2594 mapTbl[j--] = (U8)i;
2601 effTbl = &rgSchCmnExtUlEff[0];
2602 mapTbl = &rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_EXT_CP][0];
2604 /* Initializing new variable added for UL eff */
2605 rgSchCmnUlEffTbl[RG_SCH_CMN_EXT_CP] = &rgSchCmnExtUlEff[0];
2606 /* Three symbols are excluded from the efficiency table computation
2607 * because 2 symbols are used for DMRS (1 in each slot) and 1 symbol
2608 * is used for SRS */
2609 rgSCHCmnCompUlEff(3,RG_SCH_CMN_EXT_CP,rgSchCmnUlEffTbl[RG_SCH_CMN_EXT_CP]);
2611 for (i = RGSCH_NUM_ITBS - 1, j = RG_SCH_CMN_UL_NUM_CQI - 1;
2612 i >= 0 && j > 0; --i)
2614 if ((*effTbl)[i] <= cqiTbl[j].eff)
2616 mapTbl[j--] = (U8)i;
2628 * @brief This function initializes all the data for the scheduler.
2632 * Function: rgSCHCmnInit
2633 * Purpose: This function initializes the following information:
2634 * 1. Efficiency table
2635 * 2. CQI to table index - It is one row for up to 3 RBs
2636 * and another row for greater than 3 RBs.
2637 * Currently extended prefix is compiled out.
2638 * Invoked by: MAC initialization code, possibly ActvInit
2644 PUBLIC Void rgSCHCmnInit
2648 PUBLIC Void rgSCHCmnInit()
2657 rgSCHEmtcCmnDlInit();
2658 rgSCHEmtcCmnUlInit();
2664 /* Init the function pointers */
2665 rgSchCmnApis.rgSCHRgrUeCfg = rgSCHCmnRgrUeCfg;
2666 rgSchCmnApis.rgSCHRgrUeRecfg = rgSCHCmnRgrUeRecfg;
2667 rgSchCmnApis.rgSCHFreeUe = rgSCHCmnUeDel;
2668 rgSchCmnApis.rgSCHRgrCellCfg = rgSCHCmnRgrCellCfg;
2669 rgSchCmnApis.rgSCHRgrCellRecfg = rgSCHCmnRgrCellRecfg;
2670 rgSchCmnApis.rgSCHFreeCell = rgSCHCmnCellDel;
2671 rgSchCmnApis.rgSCHRgrLchCfg = rgSCHCmnRgrLchCfg;
2672 rgSchCmnApis.rgSCHRgrLcgCfg = rgSCHCmnRgrLcgCfg;
2673 rgSchCmnApis.rgSCHRgrLchRecfg = rgSCHCmnRgrLchRecfg;
2674 rgSchCmnApis.rgSCHRgrLcgRecfg = rgSCHCmnRgrLcgRecfg;
2675 rgSchCmnApis.rgSCHFreeDlLc = rgSCHCmnFreeDlLc;
2676 rgSchCmnApis.rgSCHFreeLcg = rgSCHCmnLcgDel;
2677 rgSchCmnApis.rgSCHRgrLchDel = rgSCHCmnRgrLchDel;
2678 rgSchCmnApis.rgSCHActvtUlUe = rgSCHCmnActvtUlUe;
2679 rgSchCmnApis.rgSCHActvtDlUe = rgSCHCmnActvtDlUe;
2680 rgSchCmnApis.rgSCHHdlUlTransInd = rgSCHCmnHdlUlTransInd;
2681 rgSchCmnApis.rgSCHDlDedBoUpd = rgSCHCmnDlDedBoUpd;
2682 rgSchCmnApis.rgSCHUlRecMsg3Alloc = rgSCHCmnUlRecMsg3Alloc;
2683 rgSchCmnApis.rgSCHUlCqiInd = rgSCHCmnUlCqiInd;
2684 rgSchCmnApis.rgSCHPucchDeltaPwrInd = rgSCHPwrPucchDeltaInd;
2685 rgSchCmnApis.rgSCHUlHqProcForUe = rgSCHCmnUlHqProcForUe;
2687 rgSchCmnApis.rgSCHUpdUlHqProc = rgSCHCmnUpdUlHqProc;
2689 rgSchCmnApis.rgSCHUpdBsrShort = rgSCHCmnUpdBsrShort;
2690 rgSchCmnApis.rgSCHUpdBsrTrunc = rgSCHCmnUpdBsrTrunc;
2691 rgSchCmnApis.rgSCHUpdBsrLong = rgSCHCmnUpdBsrLong;
2692 rgSchCmnApis.rgSCHUpdPhr = rgSCHCmnUpdPhr;
2693 rgSchCmnApis.rgSCHUpdExtPhr = rgSCHCmnUpdExtPhr;
2694 rgSchCmnApis.rgSCHContResUlGrant = rgSCHCmnContResUlGrant;
2695 rgSchCmnApis.rgSCHSrRcvd = rgSCHCmnSrRcvd;
2696 rgSchCmnApis.rgSCHFirstRcptnReq = rgSCHCmnFirstRcptnReq;
2697 rgSchCmnApis.rgSCHNextRcptnReq = rgSCHCmnNextRcptnReq;
2698 rgSchCmnApis.rgSCHFirstHqFdbkAlloc = rgSCHCmnFirstHqFdbkAlloc;
2699 rgSchCmnApis.rgSCHNextHqFdbkAlloc = rgSCHCmnNextHqFdbkAlloc;
2700 rgSchCmnApis.rgSCHDlProcAddToRetx = rgSCHCmnDlProcAddToRetx;
2701 rgSchCmnApis.rgSCHDlCqiInd = rgSCHCmnDlCqiInd;
2703 rgSchCmnApis.rgSCHUlProcAddToRetx = rgSCHCmnEmtcUlProcAddToRetx;
2706 rgSchCmnApis.rgSCHSrsInd = rgSCHCmnSrsInd;
2708 rgSchCmnApis.rgSCHDlTARpt = rgSCHCmnDlTARpt;
2709 rgSchCmnApis.rgSCHDlRlsSubFrm = rgSCHCmnDlRlsSubFrm;
2710 rgSchCmnApis.rgSCHUeReset = rgSCHCmnUeReset;
2712 rgSchCmnApis.rgSCHHdlCrntiCE = rgSCHCmnHdlCrntiCE;
2713 rgSchCmnApis.rgSCHDlProcAck = rgSCHCmnDlProcAck;
2714 rgSchCmnApis.rgSCHDlRelPdcchFbk = rgSCHCmnDlRelPdcchFbk;
2715 rgSchCmnApis.rgSCHUlSpsRelInd = rgSCHCmnUlSpsRelInd;
2716 rgSchCmnApis.rgSCHUlSpsActInd = rgSCHCmnUlSpsActInd;
2717 rgSchCmnApis.rgSCHUlCrcFailInd = rgSCHCmnUlCrcFailInd;
2718 rgSchCmnApis.rgSCHUlCrcInd = rgSCHCmnUlCrcInd;
2720 rgSchCmnApis.rgSCHDrxStrtInActvTmrInUl = rgSCHCmnDrxStrtInActvTmrInUl;
2721 rgSchCmnApis.rgSCHUpdUeDataIndLcg = rgSCHCmnUpdUeDataIndLcg;
2723 for (idx = 0; idx < RGSCH_NUM_SCHEDULERS; ++idx)
2725 rgSchUlSchdInits[idx](&rgSchUlSchdTbl[idx]);
2726 rgSchDlSchdInits[idx](&rgSchDlSchdTbl[idx]);
2729 for (idx = 0; idx < RGSCH_NUM_EMTC_SCHEDULERS; ++idx)
2731 rgSchEmtcUlSchdInits[idx](&rgSchEmtcUlSchdTbl[idx]);
2732 rgSchEmtcDlSchdInits[idx](&rgSchEmtcDlSchdTbl[idx]);
2735 #if (defined (RG_PHASE2_SCHED) && defined(TFU_UPGRADE))
2736 for (idx = 0; idx < RGSCH_NUM_DLFS_SCHEDULERS; ++idx)
2738 rgSchDlfsSchdInits[idx](&rgSchDlfsSchdTbl[idx]);
2742 rgSchCmnApis.rgSCHRgrSCellUeCfg = rgSCHCmnRgrSCellUeCfg;
2743 rgSchCmnApis.rgSCHRgrSCellUeDel = rgSCHCmnRgrSCellUeDel;
2750 * @brief This function is a wrapper to call scheduler specific API.
2754 * Function: rgSCHCmnDlRlsSubFrm
2755 * Purpose: Releases scheduler Information from DL SubFrm.
2759 * @param[in] RgSchCellCb *cell
2760 * @param[out] CmLteTimingInfo frm
2765 PUBLIC Void rgSCHCmnDlRlsSubFrm
2771 PUBLIC Void rgSCHCmnDlRlsSubFrm(cell, frm)
2773 CmLteTimingInfo frm;
2776 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2779 TRC2(rgSCHCmnDlRlsSubFrm);
2781 /* Get the pointer to the subframe */
2782 sf = rgSCHUtlSubFrmGet(cell, frm);
2784 rgSCHUtlSubFrmPut(cell, sf);
2787 /* Re-initialize DLFS specific information for the sub-frame */
2788 cellSch->apisDlfs->rgSCHDlfsReinitSf(cell, sf);
2796 * @brief This function is the starting function for DL allocation.
2800 * Function: rgSCHCmnDlCcchRarAlloc
2801 * Purpose: Scheduling for downlink. It performs allocation in the order
2802 of priority: BCCH/PCH first, then CCCH, Random Access and TA.
2804 * Invoked by: Scheduler
2806 * @param[in] RgSchCellCb* cell
2807 * @param[out] RgSchCmnDlRbAllocInfo* allocInfo
2812 PRIVATE Void rgSCHCmnDlCcchRarAlloc
2817 PRIVATE Void rgSCHCmnDlCcchRarAlloc(cell)
2821 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2823 TRC2(rgSCHCmnDlCcchRarAlloc);
2825 rgSCHCmnDlCcchRetx(cell, &cellSch->allocInfo);
2826 /* LTE_ADV_FLAG_REMOVED_START */
2827 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2829 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2831 /* eNodeB needs to blank the subframe */
2835 rgSCHCmnDlCcchTx(cell, &cellSch->allocInfo);
2840 rgSCHCmnDlCcchTx(cell, &cellSch->allocInfo);
2842 /* LTE_ADV_FLAG_REMOVED_END */
2846 /*Added these function calls for processing CCCH SDU arriving
2847 * after guard timer expiry. These differ from the above two functions
2848 * in using ueCb instead of raCb.*/
2849 rgSCHCmnDlCcchSduRetx(cell, &cellSch->allocInfo);
2850 /* LTE_ADV_FLAG_REMOVED_START */
2851 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2853 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2855 /* eNodeB needs to blank the subframe */
2859 rgSCHCmnDlCcchSduTx(cell, &cellSch->allocInfo);
2864 rgSCHCmnDlCcchSduTx(cell, &cellSch->allocInfo);
2866 /* LTE_ADV_FLAG_REMOVED_END */
2870 if(cellSch->ul.msg3SchdIdx != RGSCH_INVALID_INFO)
2872 /* Do not schedule msg3 if there is a CFI change ongoing */
2873 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2875 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2879 /* LTE_ADV_FLAG_REMOVED_START */
2880 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2882 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2884 /* eNodeB needs to blank the subframe */
2888 /* Do not schedule msg3 if there is a CFI change ongoing */
2889 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2891 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2897 /* Do not schedule msg3 if there is a CFI change ongoing */
2898 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2900 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2903 /* LTE_ADV_FLAG_REMOVED_END */
2911 * @brief Scheduling for CCCH SDU.
2915 * Function: rgSCHCmnCcchSduAlloc
2916 * Purpose: Scheduling for CCCH SDU
2918 * Invoked by: Scheduler
2920 * @param[in] RgSchCellCb* cell
2921 * @param[in] RgSchUeCb* ueCb
2922 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
2927 PRIVATE S16 rgSCHCmnCcchSduAlloc
2931 RgSchCmnDlRbAllocInfo *allocInfo
2934 PRIVATE S16 rgSCHCmnCcchSduAlloc(cell, ueCb, allocInfo)
2937 RgSchCmnDlRbAllocInfo *allocInfo;
2940 RgSchDlRbAlloc *rbAllocInfo;
2941 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2942 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
2944 TRC2(rgSCHCmnCcchSduAlloc);
2946 /* Return if subframe BW exhausted */
2947 if (allocInfo->ccchSduAlloc.ccchSduDlSf->bw <=
2948 allocInfo->ccchSduAlloc.ccchSduDlSf->bwAssigned)
2950 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2951 "bw<=bwAssigned for UEID:%d",ueCb->ueId);
2955 if (rgSCHDhmGetCcchSduHqProc(ueCb, cellSch->dl.time, &(ueDl->proc)) != ROK)
2957 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2958 "rgSCHDhmGetCcchSduHqProc failed UEID:%d",ueCb->ueId);
2962 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
2963 rbAllocInfo->dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
2965 if (rgSCHCmnCcchSduDedAlloc(cell, ueCb) != ROK)
2967 /* Fix : syed Minor failure handling, release hqP if Unsuccessful */
2968 rgSCHDhmRlsHqpTb(ueDl->proc, 0, FALSE);
2969 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2970 "rgSCHCmnCcchSduDedAlloc failed UEID:%d",ueCb->ueId);
2973 cmLListAdd2Tail(&allocInfo->ccchSduAlloc.ccchSduTxLst, &ueDl->proc->reqLnk);
2974 ueDl->proc->reqLnk.node = (PTR)ueDl->proc;
2975 allocInfo->ccchSduAlloc.ccchSduDlSf->schdCcchUe++;
2980 * @brief This function schedules downlink CCCH messages.
2984 * Function: rgSCHCmnDlCcchSduTx
2985 * Purpose: Scheduling for downlink CCCH
2987 * Invoked by: Scheduler
2989 * @param[in] RgSchCellCb *cell
2990 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
2995 PRIVATE Void rgSCHCmnDlCcchSduTx
2998 RgSchCmnDlRbAllocInfo *allocInfo
3001 PRIVATE Void rgSCHCmnDlCcchSduTx(cell, allocInfo)
3003 RgSchCmnDlRbAllocInfo *allocInfo;
3008 RgSchCmnDlUe *ueCmnDl;
3009 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3011 RgSchDlSf *dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
3013 TRC2(rgSCHCmnDlCcchSduTx);
3015 node = cell->ccchSduUeLst.first;
3018 if(cellSch->dl.maxCcchPerDlSf &&
3019 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3025 ueCb = (RgSchUeCb *)(node->node);
3026 ueCmnDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
3028 /* Fix : syed postpone scheduling for this
3029 * until msg4 is done */
3030 /* Fix : syed RLC can erroneously send CCCH SDU BO
3031 * twice. Hence an extra guard to avoid scheduling if it is already
3032 * scheduled for RETX */
3033 if ((!(ueCb->dl.dlInactvMask & RG_HQENT_INACTIVE)) &&
3036 if ((rgSCHCmnCcchSduAlloc(cell, ueCb, allocInfo)) != ROK)
3043 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"ERROR!! THIS SHOULD "
3044 "NEVER HAPPEN for UEID:%d", ueCb->ueId);
3054 * @brief This function schedules downlink CCCH messages.
3058 * Function: rgSCHCmnDlCcchTx
3059 * Purpose: Scheduling for downlink CCCH
3061 * Invoked by: Scheduler
3063 * @param[in] RgSchCellCb *cell
3064 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3069 PRIVATE Void rgSCHCmnDlCcchTx
3072 RgSchCmnDlRbAllocInfo *allocInfo
3075 PRIVATE Void rgSCHCmnDlCcchTx(cell, allocInfo)
3077 RgSchCmnDlRbAllocInfo *allocInfo;
3082 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3083 RgSchDlSf *dlSf = allocInfo->msg4Alloc.msg4DlSf;
3085 TRC2(rgSCHCmnDlCcchTx);
3087 node = cell->raInfo.toBeSchdLst.first;
3090 if(cellSch->dl.maxCcchPerDlSf &&
3091 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3098 raCb = (RgSchRaCb *)(node->node);
3100 /* Address allocation for this UE for MSG 4 */
3101 /* Allocation for Msg4 */
3102 if ((rgSCHCmnMsg4Alloc(cell, raCb, allocInfo)) != ROK)
3113  * @brief This function schedules downlink CCCH SDU retransmissions.
3117  *     Function: rgSCHCmnDlCcchSduRetx
3118  *     Purpose:  Scheduling of downlink CCCH SDU retransmissions
3120 * Invoked by: Scheduler
3122 * @param[in] RgSchCellCb *cell
3123 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3128 PRIVATE Void rgSCHCmnDlCcchSduRetx
3131 RgSchCmnDlRbAllocInfo *allocInfo
3134 PRIVATE Void rgSCHCmnDlCcchSduRetx(cell, allocInfo)
3136 RgSchCmnDlRbAllocInfo *allocInfo;
3139 RgSchDlRbAlloc *rbAllocInfo;
3141 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3143 RgSchDlHqProcCb *hqP;
3146 RgSchDlSf *dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
3148 TRC2(rgSCHCmnDlCcchSduRetx);
3150 node = cellSch->dl.ccchSduRetxLst.first;
3153 if(cellSch->dl.maxCcchPerDlSf &&
3154 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3161 hqP = (RgSchDlHqProcCb *)(node->node);
3164 /* DwPts Scheduling Changes Start */
3166 if (rgSCHCmnRetxAvoidTdd(allocInfo->ccchSduAlloc.ccchSduDlSf,
3172 /* DwPts Scheduling Changes End */
3174 if (hqP->tbInfo[0].dlGrnt.numRb > (dlSf->bw - dlSf->bwAssigned))
3178 ueCb = (RgSchUeCb*)(hqP->hqE->ue);
3179 ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
3181 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
3182 /* Fill RB Alloc Info */
3183 rbAllocInfo->dlSf = dlSf;
3184 rbAllocInfo->tbInfo[0].bytesReq = hqP->tbInfo[0].ccchSchdInfo.totBytes;
3185 rbAllocInfo->rbsReq = hqP->tbInfo[0].dlGrnt.numRb;
3186 /* Fix : syed iMcs setting did not correspond to RETX */
3187 RG_SCH_CMN_GET_MCS_FOR_RETX((&hqP->tbInfo[0]),
3188 rbAllocInfo->tbInfo[0].imcs);
3189 rbAllocInfo->rnti = ueCb->ueId;
3190 rbAllocInfo->tbInfo[0].noLyr = hqP->tbInfo[0].numLyrs;
3191 /* Fix : syed Copying info in entirety without depending on stale TX information */
3192 rbAllocInfo->tbInfo[0].tbCb = &hqP->tbInfo[0];
3193 rbAllocInfo->tbInfo[0].schdlngForTb = TRUE;
3194 /* Fix : syed Assigning proc to scratchpad */
3197 retxBw += rbAllocInfo->rbsReq;
3199 cmLListAdd2Tail(&allocInfo->ccchSduAlloc.ccchSduRetxLst, \
3201 hqP->reqLnk.node = (PTR)hqP;
3205 dlSf->bwAssigned += retxBw;
3211  * @brief This function schedules downlink CCCH (Msg4) retransmissions.
3215  *     Function: rgSCHCmnDlCcchRetx
3216  *     Purpose:  Scheduling of Msg4 (downlink CCCH) retransmissions
3218 * Invoked by: Scheduler
3220 * @param[in] RgSchCellCb *cell
3221 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3226 PRIVATE Void rgSCHCmnDlCcchRetx
3229 RgSchCmnDlRbAllocInfo *allocInfo
3232 PRIVATE Void rgSCHCmnDlCcchRetx(cell, allocInfo)
3234 RgSchCmnDlRbAllocInfo *allocInfo;
3238 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3240 RgSchDlHqProcCb *hqP;
3242 RgSchDlSf *dlSf = allocInfo->msg4Alloc.msg4DlSf;
3244 TRC2(rgSCHCmnDlCcchRetx);
3246 node = cellSch->dl.msg4RetxLst.first;
3249 if(cellSch->dl.maxCcchPerDlSf &&
3250 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3256 hqP = (RgSchDlHqProcCb *)(node->node);
3260 /* DwPts Scheduling Changes Start */
3262 if (rgSCHCmnRetxAvoidTdd(allocInfo->msg4Alloc.msg4DlSf,
3268 /* DwPts Scheduling Changes End */
3270 if (hqP->tbInfo[0].dlGrnt.numRb > (dlSf->bw - dlSf->bwAssigned))
3274 raCb = (RgSchRaCb*)(hqP->hqE->raCb);
3275 /* Fill RB Alloc Info */
3276 raCb->rbAllocInfo.dlSf = dlSf;
3277 raCb->rbAllocInfo.tbInfo[0].bytesReq = hqP->tbInfo[0].ccchSchdInfo.totBytes;
3278 raCb->rbAllocInfo.rbsReq = hqP->tbInfo[0].dlGrnt.numRb;
3279 /* Fix : syed iMcs setting did not correspond to RETX */
3280 RG_SCH_CMN_GET_MCS_FOR_RETX((&hqP->tbInfo[0]),
3281 raCb->rbAllocInfo.tbInfo[0].imcs);
3282 raCb->rbAllocInfo.rnti = raCb->tmpCrnti;
3283 raCb->rbAllocInfo.tbInfo[0].noLyr = hqP->tbInfo[0].numLyrs;
3284      /* Fix : syed Copying info in entirety without depending on stale TX information */
3285 raCb->rbAllocInfo.tbInfo[0].tbCb = &hqP->tbInfo[0];
3286 raCb->rbAllocInfo.tbInfo[0].schdlngForTb = TRUE;
3288 retxBw += raCb->rbAllocInfo.rbsReq;
3290 cmLListAdd2Tail(&allocInfo->msg4Alloc.msg4RetxLst, \
3292 hqP->reqLnk.node = (PTR)hqP;
3296 dlSf->bwAssigned += retxBw;
3302  * @brief This function implements the scheduler DL allocation for
3303  *        broadcast (on PDSCH) and paging.
3307  *     Function: rgSCHCmnDlBcchPcch
3308  *     Purpose:  This function implements the scheduler DL allocation
3309  *               for broadcast (on PDSCH) and paging.
3311 * Invoked by: Scheduler
3313 * @param[in] RgSchCellCb* cell
3319 PRIVATE Void rgSCHCmnDlBcchPcch
3322 RgSchCmnDlRbAllocInfo *allocInfo,
3323 RgInfSfAlloc *subfrmAlloc
3326 PRIVATE Void rgSCHCmnDlBcchPcch(cell, allocInfo, subfrmAlloc)
3328 RgSchCmnDlRbAllocInfo *allocInfo;
3329 RgInfSfAlloc *subfrmAlloc;
3332 CmLteTimingInfo frm;
3334 RgSchClcDlLcCb *pcch;
3338 RgSchClcDlLcCb *bcch, *bch;
3339 #endif/*RGR_SI_SCH*/
3342 TRC2(rgSCHCmnDlBcchPcch);
3344 frm = cell->crntTime;
3346 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
3347 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
3348 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
3350 // RGSCH_SUBFRAME_INDEX(frm);
3351 //RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
3354 /* Compute the subframe for which allocation is being made */
3355 /* essentially, we need pointer to the dl frame for this subframe */
3356 sf = rgSCHUtlSubFrmGet(cell, frm);
3360 bch = rgSCHDbmGetBcchOnBch(cell);
3361 #if (ERRCLASS & ERRCLS_DEBUG)
3364 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on BCH is not configured");
3368 if (bch->boLst.first != NULLP)
3370 bo = (RgSchClcBoRpt *)(bch->boLst.first->node);
3371 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3373 sf->bch.tbSize = bo->bo;
3374 cmLListDelFrm(&bch->boLst, bch->boLst.first);
3375 /* ccpu00117052 - MOD - Passing double pointer
3376 for proper NULLP assignment*/
3377 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(*bo));
3378 rgSCHUtlFillRgInfCmnLcInfo(sf, subfrmAlloc, bch->lcId,TRUE);
3383 if ((frm.sfn % 4 == 0) && (frm.subframe == 0))
3388 allocInfo->bcchAlloc.schdFirst = FALSE;
3389 bcch = rgSCHDbmGetFirstBcchOnDlsch(cell);
3390 #if (ERRCLASS & ERRCLS_DEBUG)
3393 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on DLSCH is not configured");
3397 if (bcch->boLst.first != NULLP)
3399 bo = (RgSchClcBoRpt *)(bcch->boLst.first->node);
3401 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3403 allocInfo->bcchAlloc.schdFirst = TRUE;
3404 /* Time to perform allocation for this BCCH transmission */
3405 rgSCHCmnClcAlloc(cell, sf, bcch, RGSCH_SI_RNTI, allocInfo);
3409 if(!allocInfo->bcchAlloc.schdFirst)
3412 bcch = rgSCHDbmGetSecondBcchOnDlsch(cell);
3413 #if (ERRCLASS & ERRCLS_DEBUG)
3416 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on DLSCH is not configured");
3420 lnk = bcch->boLst.first;
3421 while (lnk != NULLP)
3423 bo = (RgSchClcBoRpt *)(lnk->node);
3425 valid = rgSCHCmnChkInWin(frm, bo->timeToTx, bo->maxTimeToTx);
3429 bo->i = RGSCH_CALC_SF_DIFF(frm, bo->timeToTx);
3430 /* Time to perform allocation for this BCCH transmission */
3431 rgSCHCmnClcAlloc(cell, sf, bcch, RGSCH_SI_RNTI, allocInfo);
3436 valid = rgSCHCmnChkPastWin(frm, bo->maxTimeToTx);
3439 cmLListDelFrm(&bcch->boLst, &bo->boLstEnt);
3440 /* ccpu00117052 - MOD - Passing double pointer
3441 for proper NULLP assignment*/
3442 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo,
3443 sizeof(RgSchClcBoRpt));
3449 rgSCHDlSiSched(cell, allocInfo, subfrmAlloc);
3450 #endif/*RGR_SI_SCH*/
3452 pcch = rgSCHDbmGetPcch(cell);
3456 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"PCCH on DLSCH is not configured");
3460 if (pcch->boLst.first != NULLP)
3462 bo = (RgSchClcBoRpt *)(pcch->boLst.first->node);
3464 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3466 /* Time to perform allocation for this PCCH transmission */
3467 rgSCHCmnClcAlloc(cell, sf, pcch, RGSCH_P_RNTI, allocInfo);
3475 * Fun: rgSCHCmnChkInWin
3477 * Desc: This function checks if frm occurs in window
3479 * Ret: TRUE - if in window
3484 * File: rg_sch_cmn.c
3488 PUBLIC Bool rgSCHCmnChkInWin
3490 CmLteTimingInfo frm,
3491 CmLteTimingInfo start,
3495 PUBLIC Bool rgSCHCmnChkInWin(frm, start, end)
3496 CmLteTimingInfo frm;
3497 CmLteTimingInfo start;
3498 CmLteTimingInfo end;
3503 TRC2(rgSCHCmnChkInWin);
3505 if (end.sfn > start.sfn)
3507 if (frm.sfn > start.sfn
3508 || (frm.sfn == start.sfn && frm.slot >= start.slot))
3510 if (frm.sfn < end.sfn
3512 || (frm.sfn == end.sfn && frm.slot <= end.slot))
3514 || (frm.sfn == end.sfn && frm.slot <= start.slot))
3521 /* Testing for wrap around, sfn wraparound check should be enough */
3522 else if (end.sfn < start.sfn)
3524 if (frm.sfn > start.sfn
3525 || (frm.sfn == start.sfn && frm.slot >= start.slot))
3531 if (frm.sfn < end.sfn
3532 || (frm.sfn == end.sfn && frm.slot <= end.slot))
3538 else /* start.sfn == end.sfn */
3540 if (frm.sfn == start.sfn
3541 && (frm.slot >= start.slot
3542 && frm.slot <= end.slot))
3549 } /* end of rgSCHCmnChkInWin*/
3553 * Fun: rgSCHCmnChkPastWin
3555 * Desc: This function checks if frm has gone past window edge
3557 * Ret: TRUE - if past window edge
3562 * File: rg_sch_cmn.c
3566 PUBLIC Bool rgSCHCmnChkPastWin
3568 CmLteTimingInfo frm,
3572 PUBLIC Bool rgSCHCmnChkPastWin(frm, end)
3573 CmLteTimingInfo frm;
3574 CmLteTimingInfo end;
3577 CmLteTimingInfo refFrm = end;
3580 TRC2(rgSCHCmnChkPastWin);
3582 RGSCH_INCR_FRAME(refFrm.sfn);
3583 RGSCH_INCR_SUB_FRAME(end, 1);
3584 pastWin = rgSCHCmnChkInWin(frm, end, refFrm);
3587 } /* end of rgSCHCmnChkPastWin*/
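/* Illustrative sketch: the two window helpers above treat (sfn, slot) as a
 * circular timing space. A minimal stand-alone model of the same wrap-around
 * test, using plain integers and assuming a 1024-frame SFN space with 10
 * slots per frame (names and values here are examples only, not part of the
 * scheduler), could look like this: */
#if 0 /* example only, not compiled into the scheduler */
static int rgSCHCmnChkInWinExample(int frmSfn, int frmSlot,
                                   int startSfn, int startSlot,
                                   int endSfn, int endSlot)
{
   int space = 1024 * 10;                  /* total slots in the SFN space */
   int frm   = frmSfn   * 10 + frmSlot;    /* absolute slot index          */
   int start = startSfn * 10 + startSlot;
   int end   = endSfn   * 10 + endSlot;

   /* frm lies in [start, end] iff its circular distance from start does not
    * exceed the circular distance of end from start */
   return ((frm - start + space) % space) <= ((end - start + space) % space);
}
#endif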
3590 * @brief This function implements allocation of the resources for common
3591 * channels BCCH, PCCH.
3595 * Function: rgSCHCmnClcAlloc
3596  *     Purpose:  This function implements selection of the number of RBs based on
3597 * the allowed grant for the service. It is also responsible
3598 * for selection of MCS for the transmission.
3600 * Invoked by: Scheduler
3602 * @param[in] RgSchCellCb *cell,
3603 * @param[in] RgSchDlSf *sf,
3604 * @param[in] RgSchClcDlLcCb *lch,
3605 * @param[in] U16 rnti,
3606 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3611 PRIVATE Void rgSCHCmnClcAlloc
3615 RgSchClcDlLcCb *lch,
3617 RgSchCmnDlRbAllocInfo *allocInfo
3620 PRIVATE Void rgSCHCmnClcAlloc(cell, sf, lch, rnti, allocInfo)
3623 RgSchClcDlLcCb *lch;
3625 RgSchCmnDlRbAllocInfo *allocInfo;
3628 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
3635 U8 cfi = cellDl->currCfi;
3638 TRC2(rgSCHCmnClcAlloc);
3640 bo = (RgSchClcBoRpt *)(lch->boLst.first->node);
3644 /* rgSCHCmnClcRbAllocForFxdTb(cell, bo->bo, cellDl->ccchCqi, &rb);*/
3645 if(cellDl->bitsPerRb==0)
3647 while ((rgTbSzTbl[0][0][rb]) < (tbs*8))
3655 rb = RGSCH_CEIL((tbs*8), cellDl->bitsPerRb);
3657 /* DwPTS Scheduling Changes Start */
3659 if(sf->sfType == RG_SCH_SPL_SF_DATA)
3661 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
3663 /* Calculate the less RE's because of DwPTS */
3664 lostRe = rb * (cellDl->noResPerRb[cfi] - cellDl->numReDwPts[cfi]);
3666 /* Increase number of RBs in Spl SF to compensate for lost REs */
3667 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
3670 /* DwPTS Scheduling Changes End */
3671 /*ccpu00115595- end*/
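   /* Worked example of the DwPTS compensation above (numbers are purely
    * illustrative): with noResPerRb = 120 REs and numReDwPts = 90 REs per RB
    * in the special subframe, an allocation of rb = 10 loses
    * lostRe = 10 * (120 - 90) = 300 REs, so RGSCH_CEIL(300, 90) = 4 extra RBs
    * are added, giving rb = 14. */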
3672 /* additional check to see if required RBs
3673 * exceeds the available */
3674 if (rb > sf->bw - sf->bwAssigned)
3676 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"BW allocation "
3677 "failed for CRNTI:%d",rnti);
3681 /* Update the subframe Allocated BW field */
3682 sf->bwAssigned = sf->bwAssigned + rb;
3683 /* Fill in the BCCH/PCCH transmission info to the RBAllocInfo struct */
3684 if (rnti == RGSCH_SI_RNTI)
3686 allocInfo->bcchAlloc.rnti = rnti;
3687 allocInfo->bcchAlloc.dlSf = sf;
3688 allocInfo->bcchAlloc.tbInfo[0].bytesReq = tbs;
3689 allocInfo->bcchAlloc.rbsReq = rb;
3690 allocInfo->bcchAlloc.tbInfo[0].imcs = mcs;
3691 allocInfo->bcchAlloc.tbInfo[0].noLyr = 1;
3692 /* Nprb indication at PHY for common Ch */
3693 allocInfo->bcchAlloc.nPrb = bo->nPrb;
3697 allocInfo->pcchAlloc.rnti = rnti;
3698 allocInfo->pcchAlloc.dlSf = sf;
3699 allocInfo->pcchAlloc.tbInfo[0].bytesReq = tbs;
3700 allocInfo->pcchAlloc.rbsReq = rb;
3701 allocInfo->pcchAlloc.tbInfo[0].imcs = mcs;
3702 allocInfo->pcchAlloc.tbInfo[0].noLyr = 1;
3703 allocInfo->pcchAlloc.nPrb = bo->nPrb;
3710 * @brief This function implements PDCCH allocation for common channels.
3714 * Function: rgSCHCmnCmnPdcchAlloc
3715  *     Purpose:  This function implements allocation of a PDCCH for common channels.
3716  *               1. This uses index 0 of the PDCCH table for efficiency.
3717  *               2. Uses the candidate PDCCH count for the aggregation level.
3718 * 3. Look for availability for each candidate and choose
3719 * the first one available.
3721 * Invoked by: Scheduler
3723 * @param[in] RgSchCellCb *cell
3724 * @param[in] RgSchDlSf *sf
3725 * @return RgSchPdcch *
3726 * -# NULLP when unsuccessful
3730 PUBLIC RgSchPdcch *rgSCHCmnCmnPdcchAlloc
3736 PUBLIC RgSchPdcch *rgSCHCmnCmnPdcchAlloc(cell, subFrm)
3741 CmLteAggrLvl aggrLvl;
3742 RgSchPdcchInfo *pdcchInfo;
3744 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3745 U8 numCce; /*store num CCEs based on
3746 aggregation level */
3747 TRC2(rgSCHCmnCmnPdcchAlloc);
3749 aggrLvl = cellSch->dl.cmnChAggrLvl;
3751 pdcchInfo = &(subFrm->pdcchInfo);
3753    /* Updating the no. of nCce in pdcchInfo, in case the CFI
3756 if(subFrm->nCce != pdcchInfo->nCce)
3758 rgSCHUtlPdcchInit(cell, subFrm, subFrm->nCce);
3761 if(cell->nCce != pdcchInfo->nCce)
3763 rgSCHUtlPdcchInit(cell, subFrm, cell->nCce);
3769 case CM_LTE_AGGR_LVL4:
3772 case CM_LTE_AGGR_LVL8:
3775 case CM_LTE_AGGR_LVL16:
3782 if (rgSCHUtlPdcchAvail(cell, pdcchInfo, aggrLvl, &pdcch) == TRUE)
3785 pdcch->isSpsRnti = FALSE;
3787 /* Increment the CCE used counter in the current subframe */
3788 subFrm->cceCnt += numCce;
3789 pdcch->pdcchSearchSpace = RG_SCH_CMN_SEARCH_SPACE;
3794 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
3795 subFrm->isCceFailure = TRUE;
3797 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
3798          "PDCCH ERR: NO PDCCH AVAIL IN COMMON SEARCH SPACE aggr:%u",
3805 * @brief This function implements bandwidth allocation for common channels.
3809 * Function: rgSCHCmnClcRbAlloc
3810  *     Purpose:  This function implements bandwidth allocation logic
3811 * for common control channels.
3813 * Invoked by: Scheduler
3815 * @param[in] RgSchCellCb* cell
3819 * @param[in] U32 *tbs
3820 * @param[in] U8 *mcs
3821 * @param[in] RgSchDlSf *sf
3827 PUBLIC Void rgSCHCmnClcRbAlloc
3840 PUBLIC Void rgSCHCmnClcRbAlloc(cell, bo, cqi, rb, tbs, mcs, iTbs, isSpsBo)
3853 PRIVATE Void rgSCHCmnClcRbAlloc
3864 PRIVATE Void rgSCHCmnClcRbAlloc(cell, bo, cqi, rb, tbs, mcs, sf)
3873 #endif /* LTEMAC_SPS */
3876 RgSchCmnTbSzEff *effTbl;
3879 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3880 U8 cfi = cellSch->dl.currCfi;
3882 TRC2(rgSCHCmnClcRbAlloc);
3884 /* first get the CQI to MCS table and determine the number of RBs */
3885 effTbl = (RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]);
3886 iTbsVal = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))[cqi];
3887 RG_SCH_CMN_DL_TBS_TO_MCS(iTbsVal, *mcs);
3889 /* Efficiency is number of bits per 1024 REs */
3890 eff = (*effTbl)[iTbsVal];
3892 /* Get the number of REs needed for this bo */
3893 noRes = ((bo * 8 * 1024) / eff );
3895 /* Get the number of RBs needed for this transmission */
3896 /* Number of RBs = No of REs / No of REs per RB */
3897 tmpRb = RGSCH_CEIL(noRes, cellSch->dl.noResPerRb[cfi]);
3898 /* KWORK_FIX: added check to see if rb has crossed maxRb*/
3899 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, rgTbSzTbl[0][0], (tmpRb-1));
3900 if (tmpRb > cellSch->dl.maxDlBwPerUe)
3902 tmpRb = cellSch->dl.maxDlBwPerUe;
3904 while ((rgTbSzTbl[0][iTbsVal][tmpRb-1]/8) < bo &&
3905 (tmpRb < cellSch->dl.maxDlBwPerUe))
3908 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, rgTbSzTbl[0][0], (tmpRb-1));
3910 *tbs = rgTbSzTbl[0][iTbsVal][tmpRb-1]/8;
3912 RG_SCH_CMN_DL_TBS_TO_MCS(iTbsVal, *mcs);
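   /* Worked example of the RE -> RB computation above (all numbers are
    * hypothetical): for bo = 100 bytes the payload is 800 bits; with an
    * efficiency of eff = 1024 bits per 1024 REs (i.e. 1 bit/RE) this needs
    * noRes = (100 * 8 * 1024) / 1024 = 800 REs. Assuming
    * noResPerRb[cfi] = 120, tmpRb = RGSCH_CEIL(800, 120) = 7 RBs; the loop
    * then grows tmpRb until rgTbSzTbl[0][iTbsVal][tmpRb-1]/8 can carry bo. */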
3920 * @brief Scheduling for MSG4.
3924 * Function: rgSCHCmnMsg4Alloc
3925 * Purpose: Scheduling for MSG4
3927 * Invoked by: Scheduler
3929 * @param[in] RgSchCellCb* cell
3930 * @param[in] RgSchRaCb* raCb
3931 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3936 PRIVATE S16 rgSCHCmnMsg4Alloc
3940 RgSchCmnDlRbAllocInfo *allocInfo
3943 PRIVATE S16 rgSCHCmnMsg4Alloc(cell, raCb, allocInfo)
3946 RgSchCmnDlRbAllocInfo *allocInfo;
3949 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3951 TRC2(rgSCHCmnMsg4Alloc);
3953 /* SR_RACH_STATS : MSG4 TO BE TXED */
3955 /* Return if subframe BW exhausted */
3956 if (allocInfo->msg4Alloc.msg4DlSf->bw <=
3957 allocInfo->msg4Alloc.msg4DlSf->bwAssigned)
3959 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId ,
3964 if (rgSCHDhmGetMsg4HqProc(raCb, cellSch->dl.time) != ROK)
3966 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
3967 "rgSCHDhmGetMsg4HqProc failed");
3971 raCb->rbAllocInfo.dlSf = allocInfo->msg4Alloc.msg4DlSf;
3973 if (rgSCHCmnMsg4DedAlloc(cell, raCb) != ROK)
3975 /* Fix : syed Minor failure handling, release hqP if Unsuccessful */
3976 rgSCHDhmRlsHqpTb(raCb->dlHqE->msg4Proc, 0, FALSE);
3977 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
3978 "rgSCHCmnMsg4DedAlloc failed.");
3981 cmLListAdd2Tail(&allocInfo->msg4Alloc.msg4TxLst, &raCb->dlHqE->msg4Proc->reqLnk);
3982 raCb->dlHqE->msg4Proc->reqLnk.node = (PTR)raCb->dlHqE->msg4Proc;
3983 allocInfo->msg4Alloc.msg4DlSf->schdCcchUe++;
3990  * @brief This function implements PDCCH allocation for a UE.
3994  *     Function: rgSCHCmnPdcchAlloc
3995  *     Purpose:  This function implements allocation of a PDCCH for a UE.
3996 * 1. Get the aggregation level for the CQI of the UE.
3997 * 2. Get the candidate PDCCH count for the aggr level.
3998 * 3. Look for availability for each candidate and choose
3999 * the first one available.
4001 * Invoked by: Scheduler
4006 * @param[in] dciFrmt
4007 * @return RgSchPdcch *
4008 * -# NULLP when unsuccessful
4012 PUBLIC RgSchPdcch *rgSCHCmnPdcchAlloc
4018 TfuDciFormat dciFrmt,
4022 PUBLIC RgSchPdcch *rgSCHCmnPdcchAlloc(cell, subFrm, cqi, dciFrmt, isDtx)
4027 TfuDciFormat dciFrmt;
4031 CmLteAggrLvl aggrLvl;
4032 RgSchPdcchInfo *pdcchInfo;
4035 TRC2(rgSCHCmnPdcchAlloc);
4037 /* 3.1 consider the selected DCI format size in determining the
4038 * aggregation level */
4039    //TODO_SID Need to update. Currently using a fixed aggregation level (CM_LTE_AGGR_LVL2)
4040 aggrLvl = CM_LTE_AGGR_LVL2;//cellSch->dciAggrLvl[cqi][dciFrmt];
4043 if((dciFrmt == TFU_DCI_FORMAT_1A) &&
4044 ((ue) && (ue->allocCmnUlPdcch)) )
4046 pdcch = rgSCHCmnCmnPdcchAlloc(cell, subFrm);
4047 /* Since CRNTI Scrambled */
4050 pdcch->dciNumOfBits = ue->dciSize.cmnSize[dciFrmt];
4051 // prc_trace_format_string(PRC_TRACE_GROUP_PS, PRC_TRACE_INFO_LOW,"Forcing alloc in CMN search spc size %d fmt %d \n",
4052 // pdcch->dciNumOfBits, dciFrmt);
4058    /* Incrementing aggrLvl by one level if it is not AGGR_LVL16 (max size)
4059     * in order to increase the redundancy bits for better decoding at the UE */
4062 if (aggrLvl != CM_LTE_AGGR_LVL16)
4066 case CM_LTE_AGGR_LVL2:
4067 aggrLvl = CM_LTE_AGGR_LVL4;
4069 case CM_LTE_AGGR_LVL4:
4070 aggrLvl = CM_LTE_AGGR_LVL8;
4072 case CM_LTE_AGGR_LVL8:
4073 aggrLvl = CM_LTE_AGGR_LVL16;
4082 pdcchInfo = &subFrm->pdcchInfo;
4084    /* Updating the no. of nCce in pdcchInfo, in case the CFI
4087 if(subFrm->nCce != pdcchInfo->nCce)
4089 rgSCHUtlPdcchInit(cell, subFrm, subFrm->nCce);
4092 if(cell->nCce != pdcchInfo->nCce)
4094 rgSCHUtlPdcchInit(cell, subFrm, cell->nCce);
4098 if (pdcchInfo->nCce < (1 << (aggrLvl - 1)))
4100 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
4101 subFrm->isCceFailure = TRUE;
4102 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
4103            "PDCCH ERR: NO PDCCH AVAIL IN UE SEARCH SPACE :aggr(%u)",
4109 if (rgSCHUtlPdcchAvail(cell, pdcchInfo, aggrLvl, &pdcch) == TRUE)
4111 /* SR_RACH_STATS : Reset isTBMsg4 */
4112 pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4= FALSE;
4113 pdcch->dci.u.format0Info.isSrGrant = FALSE;
4115 pdcch->isSpsRnti = FALSE;
4117 /* Increment the CCE used counter in the current subframe */
4118 subFrm->cceCnt += aggrLvl;
4119 pdcch->pdcchSearchSpace = RG_SCH_UE_SPECIFIC_SEARCH_SPACE;
4123 if (ue->cell != cell)
4125 /* Secondary Cell */
4126 //pdcch->dciNumOfBits = ue->dciSize.noUlCcSize[dciFrmt];
4127 pdcch->dciNumOfBits = MAX_5GTF_DCIA1B1_SIZE;
4132 //pdcch->dciNumOfBits = ue->dciSize.dedSize[dciFrmt];
4133 //TODO_SID Need to update dci size.
4134 pdcch->dciNumOfBits = MAX_5GTF_DCIA1B1_SIZE;
4140 pdcch->dciNumOfBits = cell->dciSize.size[dciFrmt];
4145 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
4146 subFrm->isCceFailure = TRUE;
4148 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
4149         "PDCCH ERR: NO PDCCH AVAIL IN UE SEARCH SPACE :aggr(%u)",
4156 * @brief This function implements BW allocation for CCCH SDU
4160 * Function: rgSCHCmnCcchSduDedAlloc
4161 * Purpose: Downlink bandwidth Allocation for CCCH SDU.
4163 * Invoked by: Scheduler
4165 * @param[in] RgSchCellCb* cell
4166 * @param[out] RgSchUeCb *ueCb
4171 PRIVATE S16 rgSCHCmnCcchSduDedAlloc
4177 PRIVATE S16 rgSCHCmnCcchSduDedAlloc(cell, ueCb)
4182 RgSchDlHqEnt *hqE = NULLP;
4184 RgSchDlRbAlloc *rbAllocinfo = NULLP;
4185 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4189 U8 cfi = cellDl->currCfi;
4192 TRC2(rgSCHCmnCcchSduDedAlloc);
4194 rbAllocinfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
4196 effBo = ueCb->dlCcchInfo.bo + RGSCH_CCCH_SDU_HDRSIZE;
4199 rgSCHCmnClcRbAlloc(cell, effBo, cellDl->ccchCqi, &rbAllocinfo->rbsReq, \
4200 &rbAllocinfo->tbInfo[0].bytesReq,
4201 &rbAllocinfo->tbInfo[0].imcs, rbAllocinfo->dlSf);
4202 #else /* LTEMAC_SPS */
4203 rgSCHCmnClcRbAlloc(cell, effBo, cellDl->ccchCqi, &rbAllocinfo->rbsReq, \
4204 &rbAllocinfo->tbInfo[0].bytesReq,\
4205 &rbAllocinfo->tbInfo[0].imcs, &iTbs, FALSE,
4207 #endif /* LTEMAC_SPS */
4210 /* Cannot exceed the total number of RBs in the cell */
4211 if ((S16)rbAllocinfo->rbsReq > ((S16)(rbAllocinfo->dlSf->bw - \
4212 rbAllocinfo->dlSf->bwAssigned)))
4214       /* Check if at least one allocation is possible.
4215          This may be the case when the BW is very small and,
4216          with the configured CCCH CQI, the CCCH SDU exceeds the minimum BW */
4217 if (rbAllocinfo->dlSf->bwAssigned == 0)
4219 numRb = rbAllocinfo->dlSf->bw;
4220 RG_SCH_CMN_DL_MCS_TO_TBS(rbAllocinfo->tbInfo[0].imcs, iTbs);
4221 while (rgTbSzTbl[0][++iTbs][numRb-1]/8 < effBo)
4225 rbAllocinfo->rbsReq = numRb;
4226 rbAllocinfo->tbInfo[0].bytesReq = rgTbSzTbl[0][iTbs][numRb-1]/8;
4227 /* DwPTS Scheduling Changes Start */
4229 if(rbAllocinfo->dlSf->sfType == RG_SCH_SPL_SF_DATA)
4231 rbAllocinfo->tbInfo[0].bytesReq =
4232 rgSCHCmnCalcDwPtsTbSz(cell, effBo, &numRb, &iTbs, 1,cfi);
4235 /* DwPTS Scheduling Changes End */
4236 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, rbAllocinfo->tbInfo[0].imcs);
4244 /* Update the subframe Allocated BW field */
4245 rbAllocinfo->dlSf->bwAssigned = rbAllocinfo->dlSf->bwAssigned + \
4246 rbAllocinfo->rbsReq;
4247 hqE = RG_SCH_CMN_GET_UE_HQE(ueCb, cell);
4248 rbAllocinfo->tbInfo[0].tbCb = &hqE->ccchSduProc->tbInfo[0];
4249 rbAllocinfo->rnti = ueCb->ueId;
4250 rbAllocinfo->tbInfo[0].noLyr = 1;
4257 * @brief This function implements BW allocation for MSG4
4261 * Function: rgSCHCmnMsg4DedAlloc
4262 * Purpose: Downlink bandwidth Allocation for MSG4.
4264 * Invoked by: Scheduler
4266 * @param[in] RgSchCellCb* cell
4267 * @param[out] RgSchRaCb *raCb
4272 PRIVATE S16 rgSCHCmnMsg4DedAlloc
4278 PRIVATE S16 rgSCHCmnMsg4DedAlloc(cell, raCb)
4284 RgSchDlRbAlloc *rbAllocinfo = &raCb->rbAllocInfo;
4288 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4289 U8 cfi = cellDl->currCfi;
4292 TRC2(rgSCHCmnMsg4DedAlloc);
4294 effBo = raCb->dlCcchInfo.bo + RGSCH_MSG4_HDRSIZE + RGSCH_CONT_RESID_SIZE;
4297 rgSCHCmnClcRbAlloc(cell, effBo, raCb->ccchCqi, &rbAllocinfo->rbsReq, \
4298 &rbAllocinfo->tbInfo[0].bytesReq,\
4299 &rbAllocinfo->tbInfo[0].imcs, rbAllocinfo->dlSf);
4300 #else /* LTEMAC_SPS */
4301 rgSCHCmnClcRbAlloc(cell, effBo, raCb->ccchCqi, &rbAllocinfo->rbsReq, \
4302 &rbAllocinfo->tbInfo[0].bytesReq,\
4303 &rbAllocinfo->tbInfo[0].imcs, &iTbs, FALSE,
4305 #endif /* LTEMAC_SPS */
4308 /* Cannot exceed the total number of RBs in the cell */
4309 if ((S16)rbAllocinfo->rbsReq > ((S16)(rbAllocinfo->dlSf->bw - \
4310 rbAllocinfo->dlSf->bwAssigned)))
4312       /* Check if at least one allocation is possible.
4313          This may be the case when the BW is very small and,
4314          with the configured CCCH CQI, the CCCH SDU exceeds the minimum BW */
4315 if (rbAllocinfo->dlSf->bwAssigned == 0)
4317 numRb = rbAllocinfo->dlSf->bw;
4318 RG_SCH_CMN_DL_MCS_TO_TBS(rbAllocinfo->tbInfo[0].imcs, iTbs);
4319 while (rgTbSzTbl[0][++iTbs][numRb-1]/8 < effBo)
4323 rbAllocinfo->rbsReq = numRb;
4324 rbAllocinfo->tbInfo[0].bytesReq = rgTbSzTbl[0][iTbs][numRb-1]/8;
4325 /* DwPTS Scheduling Changes Start */
4327 if(rbAllocinfo->dlSf->sfType == RG_SCH_SPL_SF_DATA)
4329 rbAllocinfo->tbInfo[0].bytesReq =
4330 rgSCHCmnCalcDwPtsTbSz(cell, effBo, &numRb, &iTbs, 1, cfi);
4333 /* DwPTS Scheduling Changes End */
4334 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, rbAllocinfo->tbInfo[0].imcs);
4342 /* Update the subframe Allocated BW field */
4343 rbAllocinfo->dlSf->bwAssigned = rbAllocinfo->dlSf->bwAssigned + \
4344 rbAllocinfo->rbsReq;
4345 rbAllocinfo->rnti = raCb->tmpCrnti;
4346 rbAllocinfo->tbInfo[0].tbCb = &raCb->dlHqE->msg4Proc->tbInfo[0];
4347 rbAllocinfo->tbInfo[0].schdlngForTb = TRUE;
4348 rbAllocinfo->tbInfo[0].noLyr = 1;
4355 * @brief This function implements scheduling for RA Response.
4359 * Function: rgSCHCmnDlRaRsp
4360 * Purpose: Downlink scheduling for RA responses.
4362 * Invoked by: Scheduler
4364 * @param[in] RgSchCellCb* cell
4369 PRIVATE Void rgSCHCmnDlRaRsp
4372 RgSchCmnDlRbAllocInfo *allocInfo
4375 PRIVATE Void rgSCHCmnDlRaRsp(cell, allocInfo)
4377 RgSchCmnDlRbAllocInfo *allocInfo;
4380 CmLteTimingInfo frm;
4381 CmLteTimingInfo schFrm;
4387 RgSchTddRachRspLst *rachRsp;
4388 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
4392 TRC2(rgSCHCmnDlRaRsp);
4394 frm = cell->crntTime;
4395 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
4397 /* Compute the subframe for which allocation is being made */
4398 /* essentially, we need pointer to the dl frame for this subframe */
4399 subFrm = rgSCHUtlSubFrmGet(cell, frm);
4401 /* Get the RACH Response scheduling related information
4402 * for the subframe with RA index */
4403 raIdx = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][frm.subframe]-1;
4405 rachRsp = &cell->rachRspLst[raIdx];
4407 for(sfnIdx = 0; sfnIdx < rachRsp->numRadiofrms; sfnIdx++)
4409 /* For all scheduled RACH Responses in SFNs */
4411 RG_SCH_CMN_DECR_FRAME(schFrm.sfn, rachRsp->rachRsp[sfnIdx].sfnOffset);
4412 /* For all scheduled RACH Responses in subframes */
4414 subfrmIdx < rachRsp->rachRsp[sfnIdx].numSubfrms; subfrmIdx++)
4416 schFrm.subframe = rachRsp->rachRsp[sfnIdx].subframe[subfrmIdx];
4417 /* compute the last RA RNTI used in the previous subframe */
4418 raIdx = (((schFrm.sfn % cell->raInfo.maxRaSize) * \
4419 RGSCH_NUM_SUB_FRAMES * RGSCH_MAX_RA_RNTI_PER_SUBFRM) \
4422 /* For all RA RNTIs within a subframe */
4424 for(i=0; (i < RGSCH_MAX_RA_RNTI_PER_SUBFRM) && \
4425 (noRaRnti < RGSCH_MAX_TDD_RA_RSP_ALLOC); i++)
4427 rarnti = (schFrm.subframe + RGSCH_NUM_SUB_FRAMES*i + 1);
4428 rntiIdx = (raIdx + RGSCH_NUM_SUB_FRAMES*i);
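            /* Illustrative mapping (TDD): with RGSCH_NUM_SUB_FRAMES = 10, a
             * RACH received in subframe 1 with frequency index i = 0 gives
             * rarnti = 1 + 10*0 + 1 = 2, and i = 1 gives rarnti = 12, i.e.
             * RA-RNTI = 1 + t_id + 10*f_id as defined in 36.321. */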
4430 if (cell->raInfo.raReqLst[rntiIdx].first != NULLP)
4432 /* compute the next RA RNTI */
4433 if (rgSCHCmnRaRspAlloc(cell, subFrm, rntiIdx,
4434 rarnti, noRaRnti, allocInfo) != ROK)
4436 /* The resources are exhausted */
4450 * @brief This function implements scheduling for RA Response.
4454 * Function: rgSCHCmnDlRaRsp
4455 * Purpose: Downlink scheduling for RA responses.
4457 * Invoked by: Scheduler
4459 * @param[in] RgSchCellCb* cell
4460 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
4465 PRIVATE Void rgSCHCmnDlRaRsp //FDD
4468 RgSchCmnDlRbAllocInfo *allocInfo
4471 PRIVATE Void rgSCHCmnDlRaRsp(cell, allocInfo)
4473 RgSchCmnDlRbAllocInfo *allocInfo;
4476 CmLteTimingInfo frm;
4477 CmLteTimingInfo winStartFrm;
4483 RgSchCmnCell *sched;
4485 TRC2(rgSCHCmnDlRaRsp);
4487 frm = cell->crntTime;
4488 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
4490 /* Compute the subframe for which allocation is being made */
4491 /* essentially, we need pointer to the dl frame for this subframe */
4492 subFrm = rgSCHUtlSubFrmGet(cell, frm);
4493 sched = RG_SCH_CMN_GET_CELL(cell);
4495    /* ccpu00132523 - Window start is calculated by considering the RAR window size,
4496     * the RAR wait period and the subframes occupied by the respective preamble format */
4497 winGap = (sched->dl.numRaSubFrms-1) + (cell->rachCfg.raWinSize-1)
4498 +RGSCH_RARSP_WAIT_PERIOD;
4500    /* The window starting occasion is derived from this gap and then
4501     * mapped into the bounds of the raReqLst array */
4502 RGSCHDECRFRMCRNTTIME(frm, winStartFrm, winGap);
4504 //5G_TODO TIMING update. Need to check
4505 winStartIdx = (winStartFrm.sfn & 1) * RGSCH_MAX_RA_RNTI+ winStartFrm.slot;
4507 for(i = 0; ((i < cell->rachCfg.raWinSize) && (noRaRnti < RG_SCH_CMN_MAX_CMN_PDCCH)); i++)
4509 raIdx = (winStartIdx + i) % RGSCH_RAREQ_ARRAY_SIZE;
4511 if (cell->raInfo.raReqLst[raIdx].first != NULLP)
4513 allocInfo->raRspAlloc[noRaRnti].biEstmt = \
4514 (!i * RGSCH_ONE_BIHDR_SIZE);
4515 rarnti = raIdx % RGSCH_MAX_RA_RNTI+ 1;
4516 if (rgSCHCmnRaRspAlloc(cell, subFrm, raIdx,
4517 rarnti, noRaRnti, allocInfo) != ROK)
4519 /* The resources are exhausted */
4522          /* ccpu00132523 - If all the RAPIDs could not be scheduled, there is
4523           * no need to proceed to the next RA-RNTIs */
4524 if(allocInfo->raRspAlloc[noRaRnti].numRapids < cell->raInfo.raReqLst[raIdx].count)
4528 noRaRnti++; /* Max of RG_SCH_CMN_MAX_CMN_PDCCH RARNTIs
4529 for response allocation */
4538 * @brief This function allocates the resources for an RARNTI.
4542 * Function: rgSCHCmnRaRspAlloc
4543 * Purpose: Allocate resources to a RARNTI.
4544 * 0. Allocate PDCCH for sending the response.
4545 * 1. Locate the number of RA requests pending for the RARNTI.
4546 * 2. Compute the size of data to be built.
4547 * 3. Using common channel CQI, compute the number of RBs.
4549 * Invoked by: Scheduler
4551 * @param[in] RgSchCellCb *cell,
4552 * @param[in] RgSchDlSf *subFrm,
4553 * @param[in] U16 rarnti,
4554 * @param[in] U8 noRaRnti
4555 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
4560 PRIVATE S16 rgSCHCmnRaRspAlloc
4567 RgSchCmnDlRbAllocInfo *allocInfo
4570 PRIVATE S16 rgSCHCmnRaRspAlloc(cell,subFrm,raIndex,rarnti,noRaRnti,allocInfo)
4576 RgSchCmnDlRbAllocInfo *allocInfo;
4579 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4580 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4584 /*ccpu00116700,ccpu00116708- Corrected the wrong type for mcs*/
4587 /* RACH handling related changes */
4588 Bool isAlloc = FALSE;
4589 static U8 schdNumRapid = 0;
4595 U8 cfi = cellDl->currCfi;
4598 TRC2(rgSCHCmnRaRspAlloc);
4603 /* ccpu00132523: Resetting the schdRap Id count in every scheduling subframe*/
4610 if (subFrm->bw == subFrm->bwAssigned)
4612 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4613 "bw == bwAssigned RARNTI:%d",rarnti);
4617 reqLst = &cell->raInfo.raReqLst[raIndex];
4618 if (reqLst->count == 0)
4620 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4621 "reqLst Count=0 RARNTI:%d",rarnti);
4624 remNumRapid = reqLst->count;
4627 /* Limit number of rach rsps to maxMsg3PerUlsf */
4628 if ( schdNumRapid+remNumRapid > cellUl->maxMsg3PerUlSf )
4630 remNumRapid = cellUl->maxMsg3PerUlSf-schdNumRapid;
4636 /* Try allocating for as many RAPIDs as possible */
4637    /* Add the BI sub-header size to the tbSize requirement */
4638 noBytes = RGSCH_GET_RAR_BYTES(remNumRapid) +\
4639 allocInfo->raRspAlloc[noRaRnti].biEstmt;
4640 if ((allwdTbSz = rgSCHUtlGetAllwdCchTbSz(noBytes*8, &nPrb, &mcs)) == -1)
4646 /* rgSCHCmnClcRbAllocForFxdTb(cell, allwdTbSz/8, cellDl->ccchCqi, &rb);*/
4647 if(cellDl->bitsPerRb==0)
4649 while ((rgTbSzTbl[0][0][rb]) <(U32) allwdTbSz)
4657 rb = RGSCH_CEIL(allwdTbSz, cellDl->bitsPerRb);
4659 /* DwPTS Scheduling Changes Start */
4661 if (subFrm->sfType == RG_SCH_SPL_SF_DATA)
4663 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
4665 /* Calculate the less RE's because of DwPTS */
4666 lostRe = rb * (cellDl->noResPerRb[cfi] -
4667 cellDl->numReDwPts[cfi]);
4669 /* Increase number of RBs in Spl SF to compensate for lost REs */
4670 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
4673 /* DwPTS Scheduling Changes End */
4675 /*ccpu00115595- end*/
4676 if (rb > subFrm->bw - subFrm->bwAssigned)
4681 /* Allocation succeeded for 'remNumRapid' */
4684 printf("\n!!!RAR alloc noBytes:%u,allwdTbSz:%u,tbs:%u,rb:%u\n",
4685 noBytes,allwdTbSz,tbs,rb);
4690 RLOG_ARG0(L_INFO,DBG_CELLID,cell->cellId,"BW alloc Failed");
4694 subFrm->bwAssigned = subFrm->bwAssigned + rb;
4696 /* Fill AllocInfo structure */
4697 allocInfo->raRspAlloc[noRaRnti].rnti = rarnti;
4698 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].bytesReq = tbs;
4699 allocInfo->raRspAlloc[noRaRnti].rbsReq = rb;
4700 allocInfo->raRspAlloc[noRaRnti].dlSf = subFrm;
4701 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].imcs = mcs;
4702 allocInfo->raRspAlloc[noRaRnti].raIndex = raIndex;
4703 /* RACH changes for multiple RAPID handling */
4704 allocInfo->raRspAlloc[noRaRnti].numRapids = remNumRapid;
4705 allocInfo->raRspAlloc[noRaRnti].nPrb = nPrb;
4706 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].noLyr = 1;
4707 allocInfo->raRspAlloc[noRaRnti].vrbgReq = RGSCH_CEIL(nPrb,MAX_5GTF_VRBG_SIZE);
4708 schdNumRapid += remNumRapid;
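   /* Illustrative RAR sizing: in LTE a RAR entry is a 6-byte payload plus a
    * 1-byte MAC sub-header, and a BI sub-header adds one more byte. So, purely
    * as an example, remNumRapid = 3 with a 1-byte BI estimate needs about
    * 3*7 + 1 = 22 bytes = 176 bits, which rgSCHUtlGetAllwdCchTbSz() then
    * rounds up to the next allowed common-channel TB size. */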
4712 /***********************************************************
4714 * Func : rgSCHCmnUlAllocFillRbInfo
4716 * Desc : Fills the start RB and the number of RBs for
4717 * uplink allocation.
4725 **********************************************************/
4727 PUBLIC Void rgSCHCmnUlAllocFillRbInfo
4734 PUBLIC Void rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc)
4737 RgSchUlAlloc *alloc;
4740 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4741 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4742 U8 cfi = cellDl->currCfi;
4745 TRC2(rgSCHCmnUlAllocFillRbInfo);
4746 alloc->grnt.rbStart = (alloc->sbStart * cellUl->sbSize) +
4747 cell->dynCfiCb.bwInfo[cfi].startRb;
4749 /* Num RBs = numSbAllocated * sbSize - less RBs in the last SB */
4750 alloc->grnt.numRb = (alloc->numSb * cellUl->sbSize);
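   /* Worked example (hypothetical numbers): with a sub-band size of
    * cellUl->sbSize = 4 RBs and bwInfo[cfi].startRb = 1, an allocation
    * starting at sub-band 3 gives rbStart = 3*4 + 1 = 13; numSb = 5 sub-bands
    * then give numRb = 5*4 = 20 RBs. */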
4756 * @brief Grant request for Msg3.
4760 * Function : rgSCHCmnMsg3GrntReq
4762 * This is invoked by downlink scheduler to request allocation
4765 * - Attempt to allocate msg3 in the current msg3 subframe
4766 * Allocation attempt based on whether preamble is from group A
4767 * and the value of MESSAGE_SIZE_GROUP_A
4768 * - Link allocation with passed RNTI and msg3 HARQ process
4769 * - Set the HARQ process ID (*hqProcIdRef)
4771 * @param[in] RgSchCellCb *cell
4772 * @param[in] CmLteRnti rnti
4773 * @param[in] Bool preamGrpA
4774 * @param[in] RgSchUlHqProcCb *hqProc
4775 * @param[out] RgSchUlAlloc **ulAllocRef
4776 * @param[out] U8 *hqProcIdRef
4780 PRIVATE Void rgSCHCmnMsg3GrntReq
4785 RgSchUlHqProcCb *hqProc,
4786 RgSchUlAlloc **ulAllocRef,
4790 PRIVATE Void rgSCHCmnMsg3GrntReq(cell, rnti, preamGrpA, hqProc,
4791 ulAllocRef, hqProcIdRef)
4795 RgSchUlHqProcCb *hqProc;
4796 RgSchUlAlloc **ulAllocRef;
4800 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4801 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->msg3SchdIdx];
4803 RgSchUlAlloc *alloc;
4807 TRC2(rgSCHCmnMsg3GrntReq);
4809 *ulAllocRef = NULLP;
4811 /* Fix: ccpu00120610 Use remAllocs from subframe during msg3 allocation */
4812 if (*sf->allocCountRef >= cellUl->maxAllocPerUlSf)
4816 if (preamGrpA == FALSE)
4818 numSb = cellUl->ra.prmblBNumSb;
4819 iMcs = cellUl->ra.prmblBIMcs;
4823 numSb = cellUl->ra.prmblANumSb;
4824 iMcs = cellUl->ra.prmblAIMcs;
4827 if ((hole = rgSCHUtlUlHoleFirst(sf)) != NULLP)
4829 if(*sf->allocCountRef == 0)
4831 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4832 /* Reinitialize the hole */
4833 if (sf->holeDb->count == 1 && (hole->start == 0)) /* Sanity check of holeDb */
4835 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
4836 /* Re-Initialize available subbands because of CFI change*/
4837 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
4841 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4842 "Error! holeDb sanity check failed RNTI:%d",rnti);
4845 if (numSb <= hole->num)
4848 alloc = rgSCHUtlUlAllocGetHole(sf, numSb, hole);
4849 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
4850 alloc->grnt.iMcs = iMcs;
4851 alloc->grnt.iMcsCrnt = iMcs;
4852 iTbs = rgSCHCmnUlGetITbsFrmIMcs(iMcs);
4853 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[0], iTbs);
4854 /* To include the length and ModOrder in DataRecp Req.*/
4855 alloc->grnt.datSz = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
4856 RG_SCH_UL_MCS_TO_MODODR(iMcs, alloc->grnt.modOdr);
4857 /* RACHO : setting nDmrs to 0 and UlDelaybit to 0*/
4858 alloc->grnt.nDmrs = 0;
4859 alloc->grnt.hop = 0;
4860 alloc->grnt.delayBit = 0;
4861 alloc->grnt.isRtx = FALSE;
4862 *ulAllocRef = alloc;
4863 *hqProcIdRef = (cellUl->msg3SchdHqProcIdx);
4864 hqProc->procId = *hqProcIdRef;
4865 hqProc->ulSfIdx = (cellUl->msg3SchdIdx);
4868 alloc->pdcch = FALSE;
4869 alloc->forMsg3 = TRUE;
4870 alloc->hqProc = hqProc;
4871 rgSCHUhmNewTx(hqProc, (U8)(cell->rachCfg.maxMsg3Tx - 1), alloc);
4872 //RLOG_ARG4(L_DEBUG,DBG_CELLID,cell->cellId,
4874 "\nRNTI:%d MSG3 ALLOC proc(%p)procId(%d)schdIdx(%d)\n",
4876 ((PTR)alloc->hqProc),
4877 alloc->hqProc->procId,
4878 alloc->hqProc->ulSfIdx);
4879 RLOG_ARG2(L_DEBUG,DBG_CELLID,cell->cellId,
4880 "alloc(%p)maxMsg3Tx(%d)",
4882 cell->rachCfg.maxMsg3Tx);
4891 * @brief This function determines the allocation limits and
4892 * parameters that aid in DL scheduling.
4896 * Function: rgSCHCmnDlSetUeAllocLmt
4897 * Purpose: This function determines the Maximum RBs
4898 * a UE is eligible to get based on softbuffer
4899  *               limitation and cell->maxDlBwPerUe. The codeword
4900 * specific parameters like iTbs, eff and noLyrs
4901 * are also set in this function. This function
4902  *               is called during UE configuration and on UeDlCqiInd.
4904 * Invoked by: Scheduler
4906 * @param[in] RgSchCellCb *cellCb
4907 * @param[in] RgSchCmnDlUe *ueDl
4912 PRIVATE Void rgSCHCmnDlSetUeAllocLmt
4919 PRIVATE Void rgSCHCmnDlSetUeAllocLmt(cell, ueDl, isEmtcUe)
4927 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
4928 U8 cfi = cellSch->dl.currCfi;
4930 TRC2(rgSCHCmnDlSetUeAllocLmt);
4933 if(TRUE == isEmtcUe)
4935 /* ITbs for CW0 for 1 Layer Tx */
4936 ueDl->mimoInfo.cwInfo[0].iTbs[0] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[0][cfi]))\
4937 [ueDl->mimoInfo.cwInfo[0].cqi];
4938 /* ITbs for CW0 for 2 Layer Tx */
4939 ueDl->mimoInfo.cwInfo[0].iTbs[1] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[1][cfi]))\
4940 [ueDl->mimoInfo.cwInfo[0].cqi];
4941 /* Eff for CW0 for 1 Layer Tx */
4942 ueDl->mimoInfo.cwInfo[0].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4943 [ueDl->mimoInfo.cwInfo[0].iTbs[0]];
4944 /* Eff for CW0 for 2 Layer Tx */
4945 ueDl->mimoInfo.cwInfo[0].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4946 [ueDl->mimoInfo.cwInfo[0].iTbs[1]];
4948 /* ITbs for CW1 for 1 Layer Tx */
4949 ueDl->mimoInfo.cwInfo[1].iTbs[0] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[0][cfi]))\
4950 [ueDl->mimoInfo.cwInfo[1].cqi];
4951 /* ITbs for CW1 for 2 Layer Tx */
4952 ueDl->mimoInfo.cwInfo[1].iTbs[1] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[1][cfi]))\
4953 [ueDl->mimoInfo.cwInfo[1].cqi];
4954 /* Eff for CW1 for 1 Layer Tx */
4955 ueDl->mimoInfo.cwInfo[1].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4956 [ueDl->mimoInfo.cwInfo[1].iTbs[0]];
4957 /* Eff for CW1 for 2 Layer Tx */
4958 ueDl->mimoInfo.cwInfo[1].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4959 [ueDl->mimoInfo.cwInfo[1].iTbs[1]];
4964 /* ITbs for CW0 for 1 Layer Tx */
4965 ueDl->mimoInfo.cwInfo[0].iTbs[0] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
4966 [ueDl->mimoInfo.cwInfo[0].cqi];
4967 /* ITbs for CW0 for 2 Layer Tx */
4968 ueDl->mimoInfo.cwInfo[0].iTbs[1] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[1][cfi]))\
4969 [ueDl->mimoInfo.cwInfo[0].cqi];
4970 /* Eff for CW0 for 1 Layer Tx */
4971 ueDl->mimoInfo.cwInfo[0].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4972 [ueDl->mimoInfo.cwInfo[0].iTbs[0]];
4973 /* Eff for CW0 for 2 Layer Tx */
4974 ueDl->mimoInfo.cwInfo[0].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4975 [ueDl->mimoInfo.cwInfo[0].iTbs[1]];
4977 /* ITbs for CW1 for 1 Layer Tx */
4978 ueDl->mimoInfo.cwInfo[1].iTbs[0] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
4979 [ueDl->mimoInfo.cwInfo[1].cqi];
4980 /* ITbs for CW1 for 2 Layer Tx */
4981 ueDl->mimoInfo.cwInfo[1].iTbs[1] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[1][cfi]))\
4982 [ueDl->mimoInfo.cwInfo[1].cqi];
4983 /* Eff for CW1 for 1 Layer Tx */
4984 ueDl->mimoInfo.cwInfo[1].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4985 [ueDl->mimoInfo.cwInfo[1].iTbs[0]];
4986 /* Eff for CW1 for 2 Layer Tx */
4987 ueDl->mimoInfo.cwInfo[1].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4988 [ueDl->mimoInfo.cwInfo[1].iTbs[1]];
4992 // ueDl->laCb.cqiBasediTbs = ueDl->mimoInfo.cwInfo[0].iTbs[0] * 100;
4994 /* Assigning noLyrs to each CW assuming optimal Spatial multiplexing
4996 (ueDl->mimoInfo.ri/2 == 0)? (ueDl->mimoInfo.cwInfo[0].noLyr = 1) : \
4997 (ueDl->mimoInfo.cwInfo[0].noLyr = ueDl->mimoInfo.ri/2);
4998 ueDl->mimoInfo.cwInfo[1].noLyr = ueDl->mimoInfo.ri - ueDl->mimoInfo.cwInfo[0].noLyr;
4999 /* rg002.101:ccpu00102106: correcting DL harq softbuffer limitation logic.
5000 * The maxTbSz is the maximum number of PHY bits a harq process can
5001 * hold. Hence we limit our allocation per harq process based on this.
5002 * Earlier implementation we misinterpreted the maxTbSz to be per UE
5003 * per TTI, but in fact it is per Harq per TTI. */
5004 /* rg002.101:ccpu00102106: cannot exceed the harq Tb Size
5005 * and harq Soft Bits limit.*/
5007 /* Considering iTbs corresponding to 2 layer transmission for
5008 * codeword0(approximation) and the maxLayers supported by
5009 * this UE at this point of time. */
5010 RG_SCH_CMN_TBS_TO_MODODR(ueDl->mimoInfo.cwInfo[0].iTbs[1], modOrder);
5012 /* Bits/modOrder gives #REs, #REs/noResPerRb gives #RBs */
5013 /* rg001.301 -MOD- [ccpu00119213] : avoiding wraparound */
5014 maxRb = ((ueDl->maxSbSz)/(cellSch->dl.noResPerRb[cfi] * modOrder *\
5015 ueDl->mimoInfo.ri));
5016 if (cellSch->dl.isDlFreqSel)
5018 /* Rounding off to left nearest multiple of RBG size */
5019 maxRb -= maxRb % cell->rbgSize;
5021 ueDl->maxRb = RGSCH_MIN(maxRb, cellSch->dl.maxDlBwPerUe);
5022 if (cellSch->dl.isDlFreqSel)
5024 /* Rounding off to right nearest multiple of RBG size */
5025 if (ueDl->maxRb % cell->rbgSize)
5027 ueDl->maxRb += (cell->rbgSize -
5028 (ueDl->maxRb % cell->rbgSize));
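      /* Worked example (hypothetical values): with ueDl->maxSbSz = 150000
       * soft bits, modOrder = 6 (64QAM), noResPerRb[cfi] = 120 and ri = 2,
       * maxRb = 150000/(120*6*2) = 104. For an RBG size of 3 the first
       * rounding gives 102; if the min() with maxDlBwPerUe then leaves a
       * non-multiple of the RBG size, it is rounded back up here. */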
5032 /* Set the index of the cwInfo, which is better in terms of
5033 * efficiency. If RI<2, only 1 CW, hence btrCwIdx shall be 0 */
5034 if (ueDl->mimoInfo.ri < 2)
5036 ueDl->mimoInfo.btrCwIdx = 0;
5040 if (ueDl->mimoInfo.cwInfo[0].eff[ueDl->mimoInfo.cwInfo[0].noLyr-1] <\
5041 ueDl->mimoInfo.cwInfo[1].eff[ueDl->mimoInfo.cwInfo[1].noLyr-1])
5043 ueDl->mimoInfo.btrCwIdx = 1;
5047 ueDl->mimoInfo.btrCwIdx = 0;
5057 * @brief This function updates TX Scheme.
5061 * Function: rgSCHCheckAndSetTxScheme
5062  *     Purpose:  This function compares the iTbs currently in use for
5063  *               codeword 0 with the CQI-based iTbs maintained by link
5064  *               adaptation. If the in-use iTbs has fallen behind by more
5065  *               than the configured threshold, transmit diversity is
5066  *               forced for the UE; the forcing is cleared once the
5067  *               in-use iTbs reaches the maximum iTbs again.
5069 * Invoked by: Scheduler
5071 * @param[in] RgSchCellCb *cell
5072 * @param[in] RgSchUeCb *ue
5077 PRIVATE Void rgSCHCheckAndSetTxScheme
5083 PRIVATE Void rgSCHCheckAndSetTxScheme(cell, ue)
5088 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
5089 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue ,cell);
5090 U8 cfi = cellSch->dl.currCfi;
5095 TRC2(rgSCHCheckAndSetTxScheme);
5097 maxiTbs = (*(RgSchCmnCqiToTbs*)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
5098 [RG_SCH_CMN_MAX_CQI - 1];
5099 cqiBasediTbs = (ueDl->laCb[0].cqiBasediTbs)/100;
5100 actualiTbs = ueDl->mimoInfo.cwInfo[0].iTbs[0];
5102 if((actualiTbs < RG_SCH_TXSCHEME_CHNG_ITBS_FACTOR) && (cqiBasediTbs >
5103 actualiTbs) && ((cqiBasediTbs - actualiTbs) > RG_SCH_TXSCHEME_CHNG_THRSHD))
5105 RG_SCH_CMN_SET_FORCE_TD(ue,cell, RG_SCH_CMN_TD_TXSCHEME_CHNG);
5108 if(actualiTbs >= maxiTbs)
5110 RG_SCH_CMN_UNSET_FORCE_TD(ue,cell, RG_SCH_CMN_TD_TXSCHEME_CHNG);
5117 * @brief This function determines the allocation limits and
5118 * parameters that aid in DL scheduling.
5122 * Function: rgSCHCmnDlSetUeAllocLmtLa
5123  *     Purpose:  This function performs link-adaptation filtering of the
5124  *               UE-reported iTbs/CQI against the CQI-based iTbs maintained
5125  *               per codeword and derives the iTbs (and hence MCS) to be
5126  *               used for subsequent DL allocations. This function is
5127  *               called when a new CQI/MCS report from the UE is
5128  *               processed.
5130 * Invoked by: Scheduler
5132 * @param[in] RgSchCellCb *cell
5133 * @param[in] RgSchUeCb *ue
5138 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa
5144 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa(cell, ue)
5152 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
5153 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
5154 U8 cfi = cellSch->dl.currCfi;
5158 TRC2(rgSCHCmnDlSetUeAllocLmtLa);
5160 maxiTbs = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))[RG_SCH_CMN_MAX_CQI - 1];
5161 if(ueDl->cqiFlag == TRUE)
5163 for(cwIdx=0; cwIdx < RG_SCH_CMN_MAX_CW_PER_UE; cwIdx++)
5167          /* Calculating the reported iTbs for code word 0 */
5168 reportediTbs = ue->ue5gtfCb.mcs;
5170 iTbsNew = (S32) reportediTbs;
5172 if(!ueDl->laCb[cwIdx].notFirstCqi)
5174 /* This is the first CQI report from UE */
5175 ueDl->laCb[cwIdx].cqiBasediTbs = (iTbsNew * 100);
5176 ueDl->laCb[cwIdx].notFirstCqi = TRUE;
5178 else if ((RG_ITBS_DIFF(reportediTbs, ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0])) > 5)
5180 /* Ignore this iTBS report and mark that last iTBS report was */
5181 /* ignored so that subsequently we reset the LA algorithm */
5182 ueDl->laCb[cwIdx].lastiTbsIgnored = TRUE;
5183 ueDl->laCb[cwIdx].numLastiTbsIgnored++;
5184 if( ueDl->laCb[cwIdx].numLastiTbsIgnored > 10)
5186 /* CQI reported by UE is not catching up. Reset the LA algorithm */
5187 ueDl->laCb[cwIdx].cqiBasediTbs = (iTbsNew * 100);
5188 ueDl->laCb[cwIdx].deltaiTbs = 0;
5189 ueDl->laCb[cwIdx].lastiTbsIgnored = FALSE;
5190 ueDl->laCb[cwIdx].numLastiTbsIgnored = 0;
5195 if (ueDl->laCb[cwIdx].lastiTbsIgnored != TRUE)
5197 ueDl->laCb[cwIdx].cqiBasediTbs = ((20 * iTbsNew * 100) +
5198 (80 * ueDl->laCb[cwIdx].cqiBasediTbs))/100;
5202 /* Reset the LA as iTbs in use caught up with the value */
5203 /* reported by UE. */
5204 ueDl->laCb[cwIdx].cqiBasediTbs = ((20 * iTbsNew * 100) +
5205 (80 * ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0] * 100))/100;
5206 ueDl->laCb[cwIdx].deltaiTbs = 0;
5207 ueDl->laCb[cwIdx].lastiTbsIgnored = FALSE;
5211 iTbsNew = (ueDl->laCb[cwIdx].cqiBasediTbs + ueDl->laCb[cwIdx].deltaiTbs)/100;
5213 RG_SCH_CHK_ITBS_RANGE(iTbsNew, maxiTbs);
5215 ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0] = RGSCH_MIN(iTbsNew, cell->thresholds.maxDlItbs);
5216 //ueDl->mimoInfo.cwInfo[cwIdx].iTbs[1] = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0];
5218 ue->ue5gtfCb.mcs = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0];
5220 printf("reportediTbs[%d] cqiBasediTbs[%d] deltaiTbs[%d] iTbsNew[%d] mcs[%d] cwIdx[%d]\n",
5221 reportediTbs, ueDl->laCb[cwIdx].cqiBasediTbs, ueDl->laCb[cwIdx].deltaiTbs,
5222 iTbsNew, ue->ue5gtfCb.mcs, cwIdx);
5226 if((ue->mimoInfo.txMode != RGR_UE_TM_3) && (ue->mimoInfo.txMode != RGR_UE_TM_4))
5231 ueDl->cqiFlag = FALSE;
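   /* Worked example of the 20/80 filtering above (values are illustrative):
    * cqiBasediTbs is stored scaled by 100. If the stored value is 1500
    * (i.e. iTbs 15.00) and the UE now reports iTbsNew = 10, the update gives
    * (20*10*100 + 80*1500)/100 = (20000 + 120000)/100 = 1400, i.e. the
    * filtered iTbs moves to 14.00 rather than jumping straight to the
    * reported value. */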
5238 /***********************************************************
5240  *     Func : rgSCHCmnDlHqPResetTemp
5242  *     Desc : Reset whatever variables were temporarily used
5243  *            during HARQ process scheduling.
5251 **********************************************************/
5253 PUBLIC Void rgSCHCmnDlHqPResetTemp
5255 RgSchDlHqProcCb *hqP
5258 PUBLIC Void rgSCHCmnDlHqPResetTemp(hqP)
5259 RgSchDlHqProcCb *hqP;
5263 TRC2(rgSCHCmnDlHqPResetTemp);
5265 /* Fix: syed having a hqP added to Lists for RB assignment rather than
5266 * a UE, as adding UE was limiting handling some scenarios */
5267 hqP->reqLnk.node = (PTR)NULLP;
5268 hqP->schdLstLnk.node = (PTR)NULLP;
5271 } /* rgSCHCmnDlHqPResetTemp */
5273 /***********************************************************
5275 * Func : rgSCHCmnDlUeResetTemp
5277  *     Desc : Reset whatever variables were temporarily used
5278 * during UE scheduling.
5286 **********************************************************/
5288 PUBLIC Void rgSCHCmnDlUeResetTemp
5291 RgSchDlHqProcCb *hqP
5294 PUBLIC Void rgSCHCmnDlUeResetTemp(ue, hqP)
5296 RgSchDlHqProcCb *hqP;
5299 RgSchDlRbAlloc *allocInfo;
5300 RgSchCmnDlUe *cmnUe = RG_SCH_CMN_GET_DL_UE(ue,hqP->hqE->cell);
5305 TRC2(rgSCHCmnDlUeResetTemp);
5307 /* Fix : syed check for UE's existence was useless.
5308 * Instead we need to check that reset is done only for the
5309 * information of a scheduled harq proc, which is cmnUe->proc.
5310 * Reset should not be done for non-scheduled hqP */
5311 if((cmnUe->proc == hqP) || (cmnUe->proc == NULLP))
5313 cmnUe->proc = NULLP;
5314 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, hqP->hqE->cell);
5316 tmpCb = allocInfo->laaCb;
5318 cmMemset((U8 *)allocInfo, (U8)0, sizeof(RgSchDlRbAlloc));
5319 allocInfo->rnti = ue->ueId;
5321 allocInfo->laaCb = tmpCb;
5323 /* Fix: syed moving this to a common function for both scheduled
5324 * and non-scheduled UEs */
5325 cmnUe->outStndAlloc = 0;
5327 rgSCHCmnDlHqPResetTemp(hqP);
5330 } /* rgSCHCmnDlUeResetTemp */
5332 /***********************************************************
5334 * Func : rgSCHCmnUlUeResetTemp
5336  *     Desc : Reset whatever variables were temporarily used
5337 * during UE scheduling.
5345 **********************************************************/
5347 PUBLIC Void rgSCHCmnUlUeResetTemp
5353 PUBLIC Void rgSCHCmnUlUeResetTemp(cell, ue)
5358 RgSchCmnUlUe *cmnUlUe = RG_SCH_CMN_GET_UL_UE(ue,cell);
5360 TRC2(rgSCHCmnUlUeResetTemp);
5362 cmMemset((U8 *)&cmnUlUe->alloc, (U8)0, sizeof(cmnUlUe->alloc));
5365 } /* rgSCHCmnUlUeResetTemp */
5370 * @brief This function fills the PDCCH information from dlProc.
5374 * Function: rgSCHCmnFillPdcch
5375 * Purpose: This function fills in the PDCCH information
5376 * obtained from the RgSchDlRbAlloc
5377 * during common channel scheduling(P, SI, RA - RNTI's).
5379 * Invoked by: Downlink Scheduler
5381 * @param[out] RgSchPdcch* pdcch
5382 * @param[in] RgSchDlRbAlloc* rbAllocInfo
5387 PUBLIC Void rgSCHCmnFillPdcch
5391 RgSchDlRbAlloc *rbAllocInfo
5394 PUBLIC Void rgSCHCmnFillPdcch(cell, pdcch, rbAllocInfo)
5397 RgSchDlRbAlloc *rbAllocInfo;
5401 TRC2(rgSCHCmnFillPdcch);
5403 /* common channel pdcch filling,
5404 * only 1A and Local is supported */
5405 pdcch->rnti = rbAllocInfo->rnti;
5406 pdcch->dci.dciFormat = rbAllocInfo->dciFormat;
5407 switch(rbAllocInfo->dciFormat)
5409 #ifdef RG_5GTF /* ANOOP: ToDo: DCI format B1/B2 filling */
5410 case TFU_DCI_FORMAT_B1:
5413 pdcch->dci.u.formatB1Info.formatType = 0;
5414 pdcch->dci.u.formatB1Info.xPDSCHRange = rbAllocInfo->tbInfo[0].cmnGrnt.xPDSCHRange;
5415 pdcch->dci.u.formatB1Info.RBAssign = rbAllocInfo->tbInfo[0].cmnGrnt.rbAssign;
5416 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.hqProcId = 0;
5417 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
5418 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = 0;
5419 //pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].ndi;
5420 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].cmnGrnt.rv;
5421 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
5422 pdcch->dci.u.formatB1Info.CSI_BSI_BRI_Req = 0;
5423 pdcch->dci.u.formatB1Info.CSIRS_BRRS_TxTiming = 0;
5424 pdcch->dci.u.formatB1Info.CSIRS_BRRS_SymbIdx = 0;
5425 pdcch->dci.u.formatB1Info.CSIRS_BRRS_ProcInd = 0;
5426 pdcch->dci.u.formatB1Info.xPUCCH_TxTiming = 0;
5427 //TODO_SID: Need to update
5428 pdcch->dci.u.formatB1Info.freqResIdx_xPUCCH = 0;
5429 pdcch->dci.u.formatB1Info.beamSwitch = 0;
5430 pdcch->dci.u.formatB1Info.SRS_Config = 0;
5431 pdcch->dci.u.formatB1Info.SRS_Symbol = 0;
5432 //TODO_SID: Need to check.Currently setting 0(1 layer, ports(8) w/o OCC).
5433 pdcch->dci.u.formatB1Info.AntPorts_numLayers = 0;
5434 pdcch->dci.u.formatB1Info.SCID = rbAllocInfo->tbInfo[0].cmnGrnt.SCID;
5435 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
5436 pdcch->dci.u.formatB1Info.tpcCmd = 1; //tpc;
5437 pdcch->dci.u.formatB1Info.DL_PCRS = 0;
5439 break; /* case TFU_DCI_FORMAT_B1: */
5442 case TFU_DCI_FORMAT_B2:
5444 //printf(" RG_5GTF:: Pdcch filling with DCI format B2\n");
5446 break; /* case TFU_DCI_FORMAT_B2: */
5449 case TFU_DCI_FORMAT_1A:
5450 pdcch->dci.u.format1aInfo.isPdcchOrder = FALSE;
5452 /*Nprb indication at PHY for common Ch
5453 *setting least significant bit of tpc field to 1 if
5454 nPrb=3 and 0 otherwise. */
5455 if (rbAllocInfo->nPrb == 3)
5457 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = 1;
5461 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = 0;
5463 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.nGap2.pres = NOTPRSNT;
5464 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.isLocal = TRUE;
5465 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.mcs = \
5466 rbAllocInfo->tbInfo[0].imcs;
5467 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.ndi = 0;
5468 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv = 0;
5470 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.type =
5472 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.u.riv =
5473 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
5474 rbAllocInfo->allocInfo.raType2.rbStart,
5475 rbAllocInfo->allocInfo.raType2.numRb);
5478 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres = \
5481 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
5482 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = 1;
5485 break; /* case TFU_DCI_FORMAT_1A: */
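         /* Illustrative RIV computation for the type-2 localized allocation
          * above: using the usual 36.213 formula
          * RIV = N_DL_RB*(L_CRBs - 1) + RB_start (valid when
          * L_CRBs - 1 <= N_DL_RB/2), a 100-RB cell with rbStart = 10 and
          * numRb = 4 yields RIV = 100*3 + 10 = 310. rgSCHCmnCalcRiv() is
          * assumed here to follow this formula. */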
5486 case TFU_DCI_FORMAT_1:
5487 pdcch->dci.u.format1Info.tpcCmd = 0;
5488          /* Avoiding this check, as we don't support Type1 RA */
5490 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
5493 pdcch->dci.u.format1Info.allocInfo.isAllocType0 = TRUE;
5494 pdcch->dci.u.format1Info.allocInfo.resAllocMap[0] =
5495 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
5497 pdcch->dci.u.format1Info.allocInfo.resAllocMap[1] =
5498 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
5500 pdcch->dci.u.format1Info.allocInfo.resAllocMap[2] =
5501 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
5503 pdcch->dci.u.format1Info.allocInfo.resAllocMap[3] =
5504 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
5508 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
5509 pdcch->dci.u.format1Info.allocInfo.ndi = 0;
5510 pdcch->dci.u.format1Info.allocInfo.mcs = rbAllocInfo->tbInfo[0].imcs;
5511 pdcch->dci.u.format1Info.allocInfo.rv = 0;
5513 pdcch->dci.u.format1Info.dai = 1;
5517          RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Allocator's incorrect "
5518                "dciFormat fill RNTI:%d",rbAllocInfo->rnti);
5526  * @brief This function finds whether the subframe is a special subframe or not.
5530 * Function: rgSCHCmnIsSplSubfrm
5531 * Purpose: This function finds the subframe index of the special subframe
5532 * and finds whether the current DL index matches it or not.
5534 * Invoked by: Scheduler
5536 * @param[in] U8 splfrmCnt
5537 * @param[in] U8 curSubfrmIdx
5538 * @param[in] U8 periodicity
5539 * @param[in] RgSchTddSubfrmInfo *subfrmInfo
5544 PRIVATE Bool rgSCHCmnIsSplSubfrm
5549 RgSchTddSubfrmInfo *subfrmInfo
5552 PRIVATE Bool rgSCHCmnIsSplSubfrm(splfrmCnt, curSubfrmIdx, periodicity, subfrmInfo)
5556 RgSchTddSubfrmInfo *subfrmInfo;
5562 TRC2(rgSCHCmnIsSplSubfrm);
5566 if(periodicity == RG_SCH_CMN_5_MS_PRD)
5570 dlSfCnt = ((splfrmCnt-1)/2) *\
5571 (subfrmInfo->numFrmHf1 + subfrmInfo->numFrmHf2);
5572 dlSfCnt = dlSfCnt + subfrmInfo->numFrmHf1;
5576 dlSfCnt = (splfrmCnt/2) * \
5577 (subfrmInfo->numFrmHf1 + subfrmInfo->numFrmHf2);
5582 dlSfCnt = splfrmCnt * subfrmInfo->numFrmHf1;
5584 splfrmIdx = RG_SCH_CMN_SPL_SUBFRM_1 +\
5585 (periodicity*splfrmCnt - dlSfCnt);
5589 splfrmIdx = RG_SCH_CMN_SPL_SUBFRM_1;
5592 if(splfrmIdx == curSubfrmIdx)
5601 * @brief This function updates DAI or UL index.
5605 * Function: rgSCHCmnUpdHqAndDai
5606 * Purpose: Updates the DAI based on UL-DL Configuration
5607 * index and UE. It also updates the HARQ feedback
5608 * time and 'm' index.
5612 * @param[in] RgDlHqProcCb *hqP
5613 * @param[in] RgSchDlSf *subFrm
5614 * @param[in] RgSchDlHqTbCb *tbCb
5615 * @param[in] U8 tbAllocIdx
5620 PRIVATE Void rgSCHCmnUpdHqAndDai
5622 RgSchDlHqProcCb *hqP,
5624 RgSchDlHqTbCb *tbCb,
5628 PRIVATE Void rgSCHCmnUpdHqAndDai(hqP, subFrm, tbCb,tbAllocIdx)
5629 RgSchDlHqProcCb *hqP;
5631 RgSchDlHqTbCb *tbCb;
5635 RgSchUeCb *ue = hqP->hqE->ue;
5637 TRC2(rgSCHCmnUpdHqAndDai);
5641 /* set the time at which UE shall send the feedback
5642 * for this process */
5643 tbCb->fdbkTime.sfn = (tbCb->timingInfo.sfn + \
5644 subFrm->dlFdbkInfo.sfnOffset) % RGSCH_MAX_SFN;
5645 tbCb->fdbkTime.subframe = subFrm->dlFdbkInfo.subframe;
5646 tbCb->m = subFrm->dlFdbkInfo.m;
5650 /* set the time at which UE shall send the feedback
5651 * for this process */
5652 tbCb->fdbkTime.sfn = (tbCb->timingInfo.sfn + \
5653 hqP->subFrm->dlFdbkInfo.sfnOffset) % RGSCH_MAX_SFN;
5654 tbCb->fdbkTime.subframe = hqP->subFrm->dlFdbkInfo.subframe;
5655 tbCb->m = hqP->subFrm->dlFdbkInfo.m;
5658    /* ccpu00132340-MOD- DAI needs to be updated for the first TB only*/
5659 if(ue && !tbAllocIdx)
5661 Bool havePdcch = (tbCb->hqP->pdcch ? TRUE : FALSE);
5664 dlDai = rgSCHCmnUpdDai(ue, &tbCb->fdbkTime, tbCb->m, havePdcch,tbCb->hqP,
5667 {/* Non SPS occasions */
5668 tbCb->hqP->pdcch->dlDai = dlDai;
5669 /* hqP->ulDai is used for N1 resource filling
5670        * when SPS occasions are present in a bundle */
5671 tbCb->hqP->ulDai = tbCb->dai;
5672 tbCb->hqP->dlDai = dlDai;
5676     /* Updating pucchFdbkIdx for both PUCCH and PUSCH
5678 tbCb->pucchFdbkIdx = tbCb->hqP->ulDai;
5685 * @brief This function updates DAI or UL index.
5689 * Function: rgSCHCmnUpdDai
5690  *     Purpose:  Updates the DAI in the ACK/NACK info; a valid
5691  *               UE should be passed.
5695  *  @param[in]  RgSchUeCb           *ue
5696  *  @param[in]  CmLteTimingInfo     *fdbkTime
5697  *  @param[in]  RgSchDlHqProcCb     *hqP
5702 PUBLIC U8 rgSCHCmnUpdDai
5705 CmLteTimingInfo *fdbkTime,
5708 RgSchDlHqProcCb *hqP,
5712 PUBLIC U8 rgSCHCmnUpdDai(ue, fdbkTime, m, havePdcch,tbCb,servCellId,hqP,ulDai)
5714 CmLteTimingInfo *fdbkTime;
5717 RgSchDlHqProcCb *hqP;
5721 RgSchTddANInfo *anInfo;
5723 U8 ackNackFdbkArrSize;
5726 TRC2(rgSCHCmnUpdDai);
5731 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
5732 hqP->hqE->cell->cellId,
5735 servCellIdx = RGSCH_PCELL_INDEX;
5737 ackNackFdbkArrSize = hqP->hqE->cell->ackNackFdbkArrSize;
5739 {/* SPS on primary cell */
5740 servCellIdx = RGSCH_PCELL_INDEX;
5741 ackNackFdbkArrSize = ue->cell->ackNackFdbkArrSize;
5745 anInfo = rgSCHUtlGetUeANFdbkInfo(ue, fdbkTime,servCellIdx);
5747 /* If no ACK/NACK feedback already present, create a new one */
5750 anInfo = &ue->cellInfo[servCellIdx]->anInfo[ue->cellInfo[servCellIdx]->nextFreeANIdx];
5751 anInfo->sfn = fdbkTime->sfn;
5752 anInfo->subframe = fdbkTime->subframe;
5753 anInfo->latestMIdx = m;
5754 /* Fixing DAI value - ccpu00109162 */
5755 /* Handle TDD case as in MIMO definition of the function */
5761 anInfo->isSpsOccasion = FALSE;
5762 /* set the free Index to store Ack/Nack Information*/
5763 ue->cellInfo[servCellIdx]->nextFreeANIdx = (ue->cellInfo[servCellIdx]->nextFreeANIdx + 1) %
5769 anInfo->latestMIdx = m;
5770 /* Fixing DAI value - ccpu00109162 */
5771 /* Handle TDD case as in MIMO definition of the function */
5772 anInfo->ulDai = anInfo->ulDai + 1;
5775 anInfo->dlDai = anInfo->dlDai + 1;
5779 /* ignoring the Scell check,
5780 * for primary cell this field is unused*/
5783 anInfo->n1ResTpcIdx = hqP->tpc;
5787    {/* As this is not required for release PDCCH */
5788 *ulDai = anInfo->ulDai;
5791 RETVALUE(anInfo->dlDai);
5794 #endif /* ifdef LTE_TDD */
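/* Sketch of the DAI bookkeeping above: anInfo->dlDai / ulDai are running
 * counters of assignments within a feedback bundle, while the 2-bit DAI
 * field carried in the DCI is assumed (see the RG_SCH_GET_DAI_VALUE usage
 * later in this file) to hold that counter folded into the 1..4 range.
 * rgSchCmnExampleDaiField is a hypothetical helper shown only to
 * illustrate that folding; it is not the scheduler's macro. */
PRIVATE U8 rgSchCmnExampleDaiField(U8 daiCounter)
{
   /* counter 1..N -> DAI value 1..4, wrapping modulo 4 */
   return ((U8)(((daiCounter - 1) & 0x03) + 1));
}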
5796 PUBLIC U32 rgHqRvRetxCnt[4][2];
5797 PUBLIC U32 rgUlrate_grant;
5800 * @brief This function fills the HqP TB with rbAllocInfo.
5804 * Function: rgSCHCmnFillHqPTb
5805 * Purpose: This function fills in the HqP TB with rbAllocInfo.
5807  *     Invoked by: the rgSCHCmnFillHqPPdcchDciFrmt* fill functions
5809 * @param[in] RgSchCellCb* cell
5810 * @param[in] RgSchDlRbAlloc *rbAllocInfo,
5811 * @param[in] U8 tbAllocIdx
5812 * @param[in] RgSchPdcch *pdcch
5818 PUBLIC Void rgSCHCmnFillHqPTb
5821 RgSchDlRbAlloc *rbAllocInfo,
5826 PUBLIC Void rgSCHCmnFillHqPTb(cell, rbAllocInfo, tbAllocIdx, pdcch)
5828 RgSchDlRbAlloc *rbAllocInfo;
5834 PRIVATE Void rgSCHCmnFillHqPTb
5837 RgSchDlRbAlloc *rbAllocInfo,
5842 PRIVATE Void rgSCHCmnFillHqPTb(cell, rbAllocInfo, tbAllocIdx, pdcch)
5844 RgSchDlRbAlloc *rbAllocInfo;
5848 #endif /* LTEMAC_SPS */
5850 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
5851 RgSchDlTbAllocInfo *tbAllocInfo = &rbAllocInfo->tbInfo[tbAllocIdx];
5852 RgSchDlHqTbCb *tbInfo = tbAllocInfo->tbCb;
5853 RgSchDlHqProcCb *hqP = tbInfo->hqP;
5855 TRC2(rgSCHCmnFillHqPTb);
5857 /*ccpu00120365-ADD-if tb is disabled, set mcs=0,rv=1.
5858 * Relevant for DCI format 2 & 2A as per 36.213-7.1.7.2
5860 if ( tbAllocInfo->isDisabled)
5863 tbInfo->dlGrnt.iMcs = 0;
5864 tbInfo->dlGrnt.rv = 1;
5866 /* Fill for TB retransmission */
5867 else if (tbInfo->txCntr > 0)
5870 tbInfo->timingInfo = cmnCellDl->time;
5872 if ((tbInfo->isAckNackDtx == TFU_HQFDB_DTX))
5874 tbInfo->dlGrnt.iMcs = tbAllocInfo->imcs;
5875 rgHqRvRetxCnt[tbInfo->dlGrnt.rv][tbInfo->tbIdx]++;
5879 tbInfo->dlGrnt.rv = rgSchCmnDlRvTbl[++(tbInfo->ccchSchdInfo.rvIdx) & 0x03];
5882 /* fill the scheduler information of hqProc */
5883 tbInfo->ccchSchdInfo.totBytes = tbAllocInfo->bytesAlloc;
5884 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx,hqP->tbInfo,tbInfo->tbIdx );
5885 rgSCHDhmHqTbRetx(hqP->hqE, tbInfo->timingInfo, hqP, tbInfo->tbIdx);
5887 /* Fill for TB transmission */
5890 /* Fill the HqProc */
5891 tbInfo->dlGrnt.iMcs = tbAllocInfo->imcs;
5892 tbInfo->tbSz = tbAllocInfo->bytesAlloc;
5893 tbInfo->timingInfo = cmnCellDl->time;
5895 tbInfo->dlGrnt.rv = rgSchCmnDlRvTbl[0];
5896 /* fill the scheduler information of hqProc */
5897 tbInfo->ccchSchdInfo.rvIdx = 0;
5898 tbInfo->ccchSchdInfo.totBytes = tbAllocInfo->bytesAlloc;
5899 /* DwPts Scheduling Changes Start */
5900 /* DwPts Scheduling Changes End */
5901 cell->measurements.dlBytesCnt += tbAllocInfo->bytesAlloc;
5904 /*ccpu00120365:-ADD-only add to subFrm list if tb is not disabled */
5905 if ( tbAllocInfo->isDisabled == FALSE )
5907 /* Set the number of transmitting SM layers for this TB */
5908 tbInfo->numLyrs = tbAllocInfo->noLyr;
5909 /* Set the TB state as WAITING to indicate TB has been
5910 * considered for transmission */
5911 tbInfo->state = HQ_TB_WAITING;
5912 hqP->subFrm = rbAllocInfo->dlSf;
5913 tbInfo->hqP->pdcch = pdcch;
5914 //tbInfo->dlGrnt.numRb = rbAllocInfo->rbsAlloc;
5915 rgSCHUtlDlHqPTbAddToTx(hqP->subFrm, hqP, tbInfo->tbIdx);
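/* Sketch of the RV handling in rgSCHCmnFillHqPTb above: a new transmission
 * starts from rvIdx 0, and each adaptive retransmission advances rvIdx
 * modulo 4 into rgSchCmnDlRvTbl. The table contents {0, 2, 3, 1} used here
 * are an assumption for illustration; only the cycling itself is taken
 * from the code above. Helper not called by the scheduler. */
PRIVATE U8 rgSchCmnExampleNextRv(U8 *rvIdx)
{
   static const U8 exampleDlRvTbl[4] = {0, 2, 3, 1}; /* assumed contents */
   *rvIdx = (U8)((*rvIdx + 1) & 0x03);
   return (exampleDlRvTbl[*rvIdx]);
}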
5921  * @brief This function fills the PDCCH DCI format B1/B2 information from dlProc.
5925  *     Function: rgSCHCmnFillHqPPdcchDciFrmtB1B2
5926 * Purpose: This function fills in the PDCCH information
5927 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
5928 * for dedicated service scheduling. It also
5929 * obtains TPC to be filled in from the power module.
5930 * Assign the PDCCH to HQProc.
5932 * Invoked by: Downlink Scheduler
5934 * @param[in] RgSchCellCb* cell
5935 * @param[in] RgSchDlRbAlloc* rbAllocInfo
5936 * @param[in] RgDlHqProc* hqP
5937 * @param[out] RgSchPdcch *pdcch
5943 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmtB1B2
5946 RgSchDlRbAlloc *rbAllocInfo,
5947 RgSchDlHqProcCb *hqP,
5952 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmtB1B2(cell, rbAllocInfo, hqP, pdcch, tpc)
5954 RgSchDlRbAlloc *rbAllocInfo;
5955 RgSchDlHqProcCb *hqP;
5961 TRC2(rgSCHCmnFillHqPPdcchDciFrmtB1B2)
5963 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
5964 //Currently hardcoding values here.
5965 //printf("Filling 5GTF UL DCI for rnti %d \n",alloc->rnti);
5966 switch(rbAllocInfo->dciFormat)
5968 case TFU_DCI_FORMAT_B1:
5970 pdcch->dci.u.formatB1Info.formatType = 0;
5971 pdcch->dci.u.formatB1Info.xPDSCHRange = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange;
5972 pdcch->dci.u.formatB1Info.RBAssign = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign;
5973 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.hqProcId = hqP->procId;
5974 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
5975 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
5976 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
5977 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
5978 pdcch->dci.u.formatB1Info.CSI_BSI_BRI_Req = 0;
5979 pdcch->dci.u.formatB1Info.CSIRS_BRRS_TxTiming = 0;
5980 pdcch->dci.u.formatB1Info.CSIRS_BRRS_SymbIdx = 0;
5981 pdcch->dci.u.formatB1Info.CSIRS_BRRS_ProcInd = 0;
5982 pdcch->dci.u.formatB1Info.xPUCCH_TxTiming = 0;
5983 //TODO_SID: Need to update
5984 pdcch->dci.u.formatB1Info.freqResIdx_xPUCCH = 0;
5985 pdcch->dci.u.formatB1Info.beamSwitch = 0;
5986 pdcch->dci.u.formatB1Info.SRS_Config = 0;
5987 pdcch->dci.u.formatB1Info.SRS_Symbol = 0;
5988 //TODO_SID: Need to check.Currently setting 0(1 layer, ports(8) w/o OCC).
5989 pdcch->dci.u.formatB1Info.AntPorts_numLayers = 0;
5990 pdcch->dci.u.formatB1Info.SCID = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.SCID;
5991 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
5992 pdcch->dci.u.formatB1Info.tpcCmd = 1; //tpc;
5993 pdcch->dci.u.formatB1Info.DL_PCRS = 0;
5996 case TFU_DCI_FORMAT_B2:
5998 pdcch->dci.u.formatB2Info.formatType = 1;
5999 pdcch->dci.u.formatB2Info.xPDSCHRange = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange;
6000 pdcch->dci.u.formatB2Info.RBAssign = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign;
6001 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.hqProcId = hqP->procId;
6002 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
6003 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6004 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6005 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
6006 pdcch->dci.u.formatB2Info.CSI_BSI_BRI_Req = 0;
6007 pdcch->dci.u.formatB2Info.CSIRS_BRRS_TxTiming = 0;
6008 pdcch->dci.u.formatB2Info.CSIRS_BRRS_SymbIdx = 0;
6009 pdcch->dci.u.formatB2Info.CSIRS_BRRS_ProcInd = 0;
6010 pdcch->dci.u.formatB2Info.xPUCCH_TxTiming = 0;
6011 //TODO_SID: Need to update
6012 pdcch->dci.u.formatB2Info.freqResIdx_xPUCCH = 0;
6013 pdcch->dci.u.formatB2Info.beamSwitch = 0;
6014 pdcch->dci.u.formatB2Info.SRS_Config = 0;
6015 pdcch->dci.u.formatB2Info.SRS_Symbol = 0;
6016 //TODO_SID: Need to check.Currently setting 4(2 layer, ports(8,9) w/o OCC).
6017 pdcch->dci.u.formatB2Info.AntPorts_numLayers = 4;
6018 pdcch->dci.u.formatB2Info.SCID = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.SCID;
6019 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
6020 pdcch->dci.u.formatB2Info.tpcCmd = 1; //tpc;
6021 pdcch->dci.u.formatB2Info.DL_PCRS = 0;
6025          RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId," 5GTF_ERROR Allocator's incorrect "
6026             "dciFormat Fill RNTI:%d",rbAllocInfo->rnti);
6033 extern U32 totPcellSCell;
6034 extern U32 addedForScell;
6035 extern U32 addedForScell1;
6036 extern U32 addedForScell2;
6038 * @brief This function fills the PDCCH information from dlProc.
6042 * Function: rgSCHCmnFillHqPPdcch
6043 * Purpose: This function fills in the PDCCH information
6044 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6045 * for dedicated service scheduling. It also
6046 * obtains TPC to be filled in from the power module.
6047 * Assign the PDCCH to HQProc.
6049 * Invoked by: Downlink Scheduler
6051 * @param[in] RgSchCellCb* cell
6052 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6053 * @param[in] RgDlHqProc* hqP
6058 PUBLIC Void rgSCHCmnFillHqPPdcch
6061 RgSchDlRbAlloc *rbAllocInfo,
6062 RgSchDlHqProcCb *hqP
6065 PUBLIC Void rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP)
6067 RgSchDlRbAlloc *rbAllocInfo;
6068 RgSchDlHqProcCb *hqP;
6071 RgSchCmnDlCell *cmnCell = RG_SCH_CMN_GET_DL_CELL(cell);
6072 RgSchPdcch *pdcch = rbAllocInfo->pdcch;
6075 TRC2(rgSCHCmnFillHqPPdcch);
6080 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6087 tpc = rgSCHPwrPucchTpcForUe(cell, hqP->hqE->ue);
6089 /* Fix: syed moving this to a common function for both scheduled
6090 * and non-scheduled UEs */
6092 pdcch->ue = hqP->hqE->ue;
6093 if (hqP->hqE->ue->csgMmbrSta == FALSE)
6095 cmnCell->ncsgPrbCnt += rbAllocInfo->rbsAlloc;
6097 cmnCell->totPrbCnt += rbAllocInfo->rbsAlloc;
6100 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlPrbUsg +=
6101 rbAllocInfo->rbsAlloc;
6102 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlSumCw0iTbs +=
6103 rbAllocInfo->tbInfo[0].iTbs;
6104 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlNumCw0iTbs ++;
6105 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt +=
6106 (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6109 totPcellSCell += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6110 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6112 addedForScell += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6113 addedForScell1 += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6115 printf (" Hqp %d cell %d addedForScell %lu addedForScell1 %lu sfn:sf %d:%d \n",
6117 hqP->hqE->cell->cellId,
6121 cell->crntTime.slot);
6125 hqP->hqE->cell->tenbStats->sch.dlPrbUsage[0] +=
6126 rbAllocInfo->rbsAlloc;
6127 hqP->hqE->cell->tenbStats->sch.dlSumCw0iTbs +=
6128 rbAllocInfo->tbInfo[0].iTbs;
6129 hqP->hqE->cell->tenbStats->sch.dlNumCw0iTbs ++;
6130 hqP->hqE->cell->tenbStats->sch.dlTtlTpt +=
6131 (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6132 if (rbAllocInfo->tbInfo[1].schdlngForTb)
6134 hqP->hqE->cell->tenbStats->sch.dlSumCw1iTbs +=
6135 rbAllocInfo->tbInfo[1].iTbs;
6136 hqP->hqE->cell->tenbStats->sch.dlNumCw1iTbs ++;
6137 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlSumCw1iTbs +=
6138 rbAllocInfo->tbInfo[1].iTbs;
6139 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlNumCw1iTbs ++;
6140 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt +=
6141 (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6145 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6147 addedForScell += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6148 addedForScell2 += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6150 printf (" Hqp %d cell %d addedForScell %lu addedForScell2 %lu \n",
6152 hqP->hqE->cell->cellId,
6157 totPcellSCell += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6161 hqP->hqE->cell->tenbStats->sch.dlTtlTpt +=
6162 (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6165 printf ("add DL TPT is %lu sfn:sf %d:%d \n", hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt ,
6167 cell->crntTime.slot);
6173 pdcch->rnti = rbAllocInfo->rnti;
6174 pdcch->dci.dciFormat = rbAllocInfo->dciFormat;
6175 /* Update subframe and pdcch info in HqTb control block */
6176 switch(rbAllocInfo->dciFormat)
6179 case TFU_DCI_FORMAT_B1:
6180 case TFU_DCI_FORMAT_B2:
6182 // printf(" RG_5GTF:: Pdcch filling with DCI format B1/B2\n");
6183 rgSCHCmnFillHqPPdcchDciFrmtB1B2(cell, rbAllocInfo, hqP, \
6189 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6190 "Allocator's incorrect dciForamt Fill for RNTI:%d",rbAllocInfo->rnti);
6197 * @brief This function fills the PDCCH DCI format 1 information from dlProc.
6201 * Function: rgSCHCmnFillHqPPdcchDciFrmt1
6202 * Purpose: This function fills in the PDCCH information
6203 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6204 * for dedicated service scheduling. It also
6205 * obtains TPC to be filled in from the power module.
6206 * Assign the PDCCH to HQProc.
6208 * Invoked by: Downlink Scheduler
6210 * @param[in] RgSchCellCb* cell
6211 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6212 * @param[in] RgDlHqProc* hqP
6213 * @param[out] RgSchPdcch *pdcch
6220 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1
6223 RgSchDlRbAlloc *rbAllocInfo,
6224 RgSchDlHqProcCb *hqP,
6229 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1(cell, rbAllocInfo, hqP, pdcch, tpc)
6231 RgSchDlRbAlloc *rbAllocInfo;
6232 RgSchDlHqProcCb *hqP;
6239 RgSchTddANInfo *anInfo;
6243 /* For activation or reactivation,
6244 * Harq ProcId should be 0 */
6245 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6248 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1)
6250 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6251 pdcch->dci.u.format1Info.tpcCmd = tpc;
6252    /* Avoiding this check, as we don't support Type 1 RA */
6254 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6257 pdcch->dci.u.format1Info.allocInfo.isAllocType0 = TRUE;
6258 pdcch->dci.u.format1Info.allocInfo.resAllocMap[0] =
6259 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6261 pdcch->dci.u.format1Info.allocInfo.resAllocMap[1] =
6262 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6264 pdcch->dci.u.format1Info.allocInfo.resAllocMap[2] =
6265 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6267 pdcch->dci.u.format1Info.allocInfo.resAllocMap[3] =
6268 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
6273 if ((!(hqP->tbInfo[0].txCntr)) &&
6274 (cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6275 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6276 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV)))
6279 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
6283 pdcch->dci.u.format1Info.allocInfo.harqProcId = hqP->procId;
6286 pdcch->dci.u.format1Info.allocInfo.harqProcId = hqP->procId;
6289 pdcch->dci.u.format1Info.allocInfo.ndi =
6290 rbAllocInfo->tbInfo[0].tbCb->ndi;
6291 pdcch->dci.u.format1Info.allocInfo.mcs =
6292 rbAllocInfo->tbInfo[0].imcs;
6293 pdcch->dci.u.format1Info.allocInfo.rv =
6294 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6296 if(hqP->hqE->ue != NULLP)
6299 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6300 hqP->hqE->cell->cellId,
6303 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6304 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6306 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6307 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6312 pdcch->dci.u.format1Info.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6316 /* Fixing DAI value - ccpu00109162 */
6317 pdcch->dci.u.format1Info.dai = RG_SCH_MAX_DAI_IDX;
6323 /* always 0 for RACH */
6324 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
6326 /* Fixing DAI value - ccpu00109162 */
6327 pdcch->dci.u.format1Info.dai = 1;
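/* Sketch of the Type 0 resource allocation packing used in the DCI fill
 * routines above and below: the 32-bit RBG bitmask is split into four
 * bytes, most significant byte first, into resAllocMap[]. Helper shown
 * for illustration only; it is not called by the scheduler. */
PRIVATE Void rgSchCmnExamplePackRaType0(U32 dlAllocBitMask, U8 resAllocMap[4])
{
   resAllocMap[0] = (U8)((dlAllocBitMask >> 24) & 0xff);
   resAllocMap[1] = (U8)((dlAllocBitMask >> 16) & 0xff);
   resAllocMap[2] = (U8)((dlAllocBitMask >> 8) & 0xff);
   resAllocMap[3] = (U8)(dlAllocBitMask & 0xff);
   return;
}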
6336 * @brief This function fills the PDCCH DCI format 1A information from dlProc.
6340 * Function: rgSCHCmnFillHqPPdcchDciFrmt1A
6341 * Purpose: This function fills in the PDCCH information
6342 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6343 * for dedicated service scheduling. It also
6344 * obtains TPC to be filled in from the power module.
6345 * Assign the PDCCH to HQProc.
6347 * Invoked by: Downlink Scheduler
6349 * @param[in] RgSchCellCb* cell
6350 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6351 * @param[in] RgDlHqProc* hqP
6352 * @param[out] RgSchPdcch *pdcch
6358 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A
6361 RgSchDlRbAlloc *rbAllocInfo,
6362 RgSchDlHqProcCb *hqP,
6367 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A(cell, rbAllocInfo, hqP, pdcch, tpc)
6369 RgSchDlRbAlloc *rbAllocInfo;
6370 RgSchDlHqProcCb *hqP;
6377 RgSchTddANInfo *anInfo;
6381 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6384 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1A)
6386 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6387 pdcch->dci.u.format1aInfo.isPdcchOrder = FALSE;
6388 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = tpc;
6389 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.mcs = \
6390 rbAllocInfo->tbInfo[0].imcs;
6391 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres = TRUE;
6393 if ((!(hqP->tbInfo[0].txCntr)) &&
6394 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6395 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6396 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6399 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val = 0;
6403 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val
6407 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val =
6410 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.ndi = \
6411 rbAllocInfo->tbInfo[0].tbCb->ndi;
6412 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv = \
6413 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6414 /* As of now, we do not support Distributed allocations */
6415 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.isLocal = TRUE;
6416 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.nGap2.pres = NOTPRSNT;
6417 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.type =
6419 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.u.riv =
6420 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
6421 rbAllocInfo->allocInfo.raType2.rbStart,
6422 rbAllocInfo->allocInfo.raType2.numRb);
6424 if(hqP->hqE->ue != NULLP)
6427 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6428 hqP->hqE->cell->cellId,
6430 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6431 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6433 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6434 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6437 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
6440 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val =
6441 RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6445 /* Fixing DAI value - ccpu00109162 */
6446 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = RG_SCH_MAX_DAI_IDX;
6447 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6448 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6455 /* always 0 for RACH */
6456 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres
6459 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
6460 /* Fixing DAI value - ccpu00109162 */
6461 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = 1;
6469 * @brief This function fills the PDCCH DCI format 1B information from dlProc.
6473 * Function: rgSCHCmnFillHqPPdcchDciFrmt1B
6474 * Purpose: This function fills in the PDCCH information
6475 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6476 * for dedicated service scheduling. It also
6477 * obtains TPC to be filled in from the power module.
6478 * Assign the PDCCH to HQProc.
6480 * Invoked by: Downlink Scheduler
6482 * @param[in] RgSchCellCb* cell
6483 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6484 * @param[in] RgDlHqProc* hqP
6485 * @param[out] RgSchPdcch *pdcch
6491 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B
6494 RgSchDlRbAlloc *rbAllocInfo,
6495 RgSchDlHqProcCb *hqP,
6500 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B(cell, rbAllocInfo, hqP, pdcch, tpc)
6502 RgSchDlRbAlloc *rbAllocInfo;
6503 RgSchDlHqProcCb *hqP;
6510 RgSchTddANInfo *anInfo;
6514 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6517 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1B)
6519 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6520 pdcch->dci.u.format1bInfo.tpcCmd = tpc;
6521 pdcch->dci.u.format1bInfo.allocInfo.mcs = \
6522 rbAllocInfo->tbInfo[0].imcs;
6524 if ((!(hqP->tbInfo[0].txCntr)) &&
6525 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6526 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6527 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6530 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = 0;
6534 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = hqP->procId;
6537 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = hqP->procId;
6539 pdcch->dci.u.format1bInfo.allocInfo.ndi = \
6540 rbAllocInfo->tbInfo[0].tbCb->ndi;
6541 pdcch->dci.u.format1bInfo.allocInfo.rv = \
6542 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6543 /* As of now, we do not support Distributed allocations */
6544 pdcch->dci.u.format1bInfo.allocInfo.isLocal = TRUE;
6545 pdcch->dci.u.format1bInfo.allocInfo.nGap2.pres = NOTPRSNT;
6546 pdcch->dci.u.format1bInfo.allocInfo.alloc.type =
6548 pdcch->dci.u.format1bInfo.allocInfo.alloc.u.riv =
6549 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
6550 rbAllocInfo->allocInfo.raType2.rbStart,
6551 rbAllocInfo->allocInfo.raType2.numRb);
6552 /* Fill precoding Info */
6553 pdcch->dci.u.format1bInfo.allocInfo.pmiCfm = \
6554 rbAllocInfo->mimoAllocInfo.precIdxInfo >> 4;
6555 pdcch->dci.u.format1bInfo.allocInfo.tPmi = \
6556 rbAllocInfo->mimoAllocInfo.precIdxInfo & 0x0F;
6558 if(hqP->hqE->ue != NULLP)
6561 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6562 hqP->hqE->cell->cellId,
6564 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6565 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6567 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6568 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6573 pdcch->dci.u.format1bInfo.dai =
6574 RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6578 pdcch->dci.u.format1bInfo.dai = RG_SCH_MAX_DAI_IDX;
6579 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6580 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6591 * @brief This function fills the PDCCH DCI format 2 information from dlProc.
6595 * Function: rgSCHCmnFillHqPPdcchDciFrmt2
6596 * Purpose: This function fills in the PDCCH information
6597 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6598 * for dedicated service scheduling. It also
6599 * obtains TPC to be filled in from the power module.
6600 * Assign the PDCCH to HQProc.
6602 * Invoked by: Downlink Scheduler
6604 * @param[in] RgSchCellCb* cell
6605 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6606 * @param[in] RgDlHqProc* hqP
6607 * @param[out] RgSchPdcch *pdcch
6613 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2
6616 RgSchDlRbAlloc *rbAllocInfo,
6617 RgSchDlHqProcCb *hqP,
6622 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2(cell, rbAllocInfo, hqP, pdcch, tpc)
6624 RgSchDlRbAlloc *rbAllocInfo;
6625 RgSchDlHqProcCb *hqP;
6632 RgSchTddANInfo *anInfo;
6636 /* ccpu00119023-ADD-For activation or reactivation,
6637 * Harq ProcId should be 0 */
6638 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6641 TRC2(rgSCHCmnFillHqPPdcchDciFrmt2)
6643 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6644 /*ccpu00120365:-ADD-call also if tb is disabled */
6645 if (rbAllocInfo->tbInfo[1].schdlngForTb ||
6646 rbAllocInfo->tbInfo[1].isDisabled)
6648 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 1, pdcch);
6650 pdcch->dci.u.format2Info.tpcCmd = tpc;
6651    /* Avoiding this check, as we don't support Type 1 RA */
6653 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6656 pdcch->dci.u.format2Info.allocInfo.isAllocType0 = TRUE;
6657 pdcch->dci.u.format2Info.allocInfo.resAllocMap[0] =
6658 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6660 pdcch->dci.u.format2Info.allocInfo.resAllocMap[1] =
6661 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6663 pdcch->dci.u.format2Info.allocInfo.resAllocMap[2] =
6664 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6666 pdcch->dci.u.format2Info.allocInfo.resAllocMap[3] =
6667 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
6672 if ((!(hqP->tbInfo[0].txCntr)) &&
6673 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6674 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6675 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6678 pdcch->dci.u.format2Info.allocInfo.harqProcId = 0;
6682 pdcch->dci.u.format2Info.allocInfo.harqProcId = hqP->procId;
6685 pdcch->dci.u.format2Info.allocInfo.harqProcId = hqP->procId;
6687 /* Initialize the TB info for both the TBs */
6688 pdcch->dci.u.format2Info.allocInfo.tbInfo[0].mcs = 0;
6689 pdcch->dci.u.format2Info.allocInfo.tbInfo[0].rv = 1;
6690 pdcch->dci.u.format2Info.allocInfo.tbInfo[1].mcs = 0;
6691 pdcch->dci.u.format2Info.allocInfo.tbInfo[1].rv = 1;
6692 /* Fill tbInfo for scheduled TBs */
6693 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6694 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6695 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6696 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[0].imcs;
6697 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6698 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6699    /* If we reach this function, it is safely assumed that
6700 * rbAllocInfo->tbInfo[0] always has non default valid values.
6701 * rbAllocInfo->tbInfo[1]'s scheduling is optional */
6702 if (rbAllocInfo->tbInfo[1].schdlngForTb == TRUE)
6704 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6705 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[1].tbCb->ndi;
6706 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6707 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[1].imcs;
6708 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6709 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[1].tbCb->dlGrnt.rv;
6711 pdcch->dci.u.format2Info.allocInfo.transSwap =
6712 rbAllocInfo->mimoAllocInfo.swpFlg;
6713 pdcch->dci.u.format2Info.allocInfo.precoding =
6714 rbAllocInfo->mimoAllocInfo.precIdxInfo;
6716 if(hqP->hqE->ue != NULLP)
6720 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6721 hqP->hqE->cell->cellId,
6723 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6724 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6726 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6727 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6732 pdcch->dci.u.format2Info.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6736 pdcch->dci.u.format2Info.dai = RG_SCH_MAX_DAI_IDX;
6737 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6738 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6748 * @brief This function fills the PDCCH DCI format 2A information from dlProc.
6752 * Function: rgSCHCmnFillHqPPdcchDciFrmt2A
6753 * Purpose: This function fills in the PDCCH information
6754 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6755 * for dedicated service scheduling. It also
6756 * obtains TPC to be filled in from the power module.
6757 * Assign the PDCCH to HQProc.
6759 * Invoked by: Downlink Scheduler
6761 * @param[in] RgSchCellCb* cell
6762 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6763 * @param[in] RgDlHqProc* hqP
6764 * @param[out] RgSchPdcch *pdcch
6770 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A
6773 RgSchDlRbAlloc *rbAllocInfo,
6774 RgSchDlHqProcCb *hqP,
6779 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A(cell, rbAllocInfo, hqP, pdcch, tpc)
6781 RgSchDlRbAlloc *rbAllocInfo;
6782 RgSchDlHqProcCb *hqP;
6788 RgSchTddANInfo *anInfo;
6792 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6795 TRC2(rgSCHCmnFillHqPPdcchDciFrmt2A)
6797 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6798 /*ccpu00120365:-ADD-call also if tb is disabled */
6799 if (rbAllocInfo->tbInfo[1].schdlngForTb ||
6800 rbAllocInfo->tbInfo[1].isDisabled)
6803 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 1, pdcch);
6806 pdcch->dci.u.format2AInfo.tpcCmd = tpc;
6807    /* Avoiding this check, as we don't support Type 1 RA */
6809 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6812 pdcch->dci.u.format2AInfo.allocInfo.isAllocType0 = TRUE;
6813 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[0] =
6814 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6816 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[1] =
6817 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6819 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[2] =
6820 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6822 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[3] =
6823 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
6828 if ((!(hqP->tbInfo[0].txCntr)) &&
6829 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6830 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6831 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6834 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = 0;
6838 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = hqP->procId;
6841 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = hqP->procId;
6843 /* Initialize the TB info for both the TBs */
6844 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[0].mcs = 0;
6845 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[0].rv = 1;
6846 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[1].mcs = 0;
6847 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[1].rv = 1;
6848 /* Fill tbInfo for scheduled TBs */
6849 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6850 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6851 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6852 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[0].imcs;
6853 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6854 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6855    /* If we reach this function, it is safely assumed that
6856 * rbAllocInfo->tbInfo[0] always has non default valid values.
6857 * rbAllocInfo->tbInfo[1]'s scheduling is optional */
6859 if (rbAllocInfo->tbInfo[1].schdlngForTb == TRUE)
6861 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6862 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[1].tbCb->ndi;
6863 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6864 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[1].imcs;
6865 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6866 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[1].tbCb->dlGrnt.rv;
6869 pdcch->dci.u.format2AInfo.allocInfo.transSwap =
6870 rbAllocInfo->mimoAllocInfo.swpFlg;
6871 pdcch->dci.u.format2AInfo.allocInfo.precoding =
6872 rbAllocInfo->mimoAllocInfo.precIdxInfo;
6874 if(hqP->hqE->ue != NULLP)
6877 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6878 hqP->hqE->cell->cellId,
6880 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6881 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6883 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6884 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6889 pdcch->dci.u.format2AInfo.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6893 pdcch->dci.u.format2AInfo.dai = RG_SCH_MAX_DAI_IDX;
6894 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6895 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6907 * @brief init of Sch vars.
6911 * Function: rgSCHCmnInitVars
6912 Purpose: Initialization of various UL subframe indices
6914 * @param[in] RgSchCellCb *cell
6919 PRIVATE Void rgSCHCmnInitVars
6924 PRIVATE Void rgSCHCmnInitVars(cell)
6928 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
6930 TRC2(rgSCHCmnInitVars);
6932 cellUl->idx = RGSCH_INVALID_INFO;
6933 cellUl->schdIdx = RGSCH_INVALID_INFO;
6934 cellUl->schdHqProcIdx = RGSCH_INVALID_INFO;
6935 cellUl->msg3SchdIdx = RGSCH_INVALID_INFO;
6937 cellUl->emtcMsg3SchdIdx = RGSCH_INVALID_INFO;
6939 cellUl->msg3SchdHqProcIdx = RGSCH_INVALID_INFO;
6940 cellUl->rcpReqIdx = RGSCH_INVALID_INFO;
6941 cellUl->hqFdbkIdx[0] = RGSCH_INVALID_INFO;
6942 cellUl->hqFdbkIdx[1] = RGSCH_INVALID_INFO;
6943 cellUl->reTxIdx[0] = RGSCH_INVALID_INFO;
6944 cellUl->reTxIdx[1] = RGSCH_INVALID_INFO;
6945 /* Stack Crash problem for TRACE5 Changes. Added the return below */
6952  * @brief Update of scheduler variables per TTI.
6956  *     Function: rgSCHCmnUpdVars
6957  *     Purpose:  Updates scheduler variables per TTI.
6959 * @param[in] RgSchCellCb *cell
6964 PUBLIC Void rgSCHCmnUpdVars
6969 PUBLIC Void rgSCHCmnUpdVars(cell)
6973 CmLteTimingInfo timeInfo;
6974 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
6977 TRC2(rgSCHCmnUpdVars);
6979 idx = (cell->crntTime.sfn * RGSCH_NUM_SUB_FRAMES_5G + cell->crntTime.slot);
6980 cellUl->idx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
6982 printf("idx %d cellUl->idx %d RGSCH_NUM_SUB_FRAMES_5G %d time(%d %d) \n",idx,cellUl->idx ,RGSCH_NUM_SUB_FRAMES_5G,cell->crntTime.sfn,cell->crntTime.slot);
6984    /* Need to schedule for after SCHED_DELTA */
6985 /* UL allocation has been advanced by 1 subframe
6986 * so that we do not wrap around and send feedback
6987 * before the data is even received by the PHY */
6988 /* Introduced timing delta for UL control */
6989 idx = (cellUl->idx + TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA);
6990 cellUl->schdIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
6992 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,
6993 TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA)
6994 cellUl->schdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
6996 /* ccpu00127193 filling schdTime for logging and enhancement purpose*/
6997 cellUl->schdTime = timeInfo;
6999 /* msg3 scheduling two subframes after general scheduling */
7000 idx = (cellUl->idx + RG_SCH_CMN_DL_DELTA + RGSCH_RARSP_MSG3_DELTA);
7001 cellUl->msg3SchdIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
7003 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,
7004 RG_SCH_CMN_DL_DELTA+ RGSCH_RARSP_MSG3_DELTA)
7005 cellUl->msg3SchdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
7007 idx = (cellUl->idx + TFU_RECPREQ_DLDELTA);
7009 cellUl->rcpReqIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
7011    /* Downlink HARQ feedback is some time after data reception / HARQ failure */
7012 /* Since feedback happens prior to scheduling being called, we add 1 to */
7013 /* take care of getting the correct subframe for feedback */
7014 idx = (cellUl->idx - TFU_CRCIND_ULDELTA + RG_SCH_CMN_UL_NUM_SF);
7016 printf("Finally setting cellUl->hqFdbkIdx[0] = %d TFU_CRCIND_ULDELTA %d RG_SCH_CMN_UL_NUM_SF %d\n",idx,TFU_CRCIND_ULDELTA,RG_SCH_CMN_UL_NUM_SF);
7018 cellUl->hqFdbkIdx[0] = (idx % (RG_SCH_CMN_UL_NUM_SF));
7020 idx = ((cellUl->schdIdx) % (RG_SCH_CMN_UL_NUM_SF));
7022 cellUl->reTxIdx[0] = (U8) idx;
7024 printf("cellUl->hqFdbkIdx[0] %d cellUl->reTxIdx[0] %d \n",cellUl->hqFdbkIdx[0], cellUl->reTxIdx[0] );
7026 /* RACHO: update cmn sched specific RACH variables,
7027 * mainly the prachMaskIndex */
7028 rgSCHCmnUpdRachParam(cell);
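/* Sketch of the index arithmetic in rgSCHCmnUpdVars above: every per-TTI
 * index (schdIdx, msg3SchdIdx, hqFdbkIdx, ...) is the absolute subframe
 * count (sfn * subframes-per-frame + slot) shifted by a delta and wrapped
 * onto the UL subframe ring. ringSize stands in for RG_SCH_CMN_UL_NUM_SF;
 * the helper is illustrative only and not called by the scheduler. */
PRIVATE U16 rgSchCmnExampleRingIdx(U32 absSf, S16 delta, U16 ringSize)
{
   /* delta may be negative (e.g. -TFU_CRCIND_ULDELTA) but is assumed to be
    * smaller in magnitude than ringSize, so adding ringSize once keeps the
    * sum non-negative before the modulo, as done for hqFdbkIdx above. */
   S32 idx = (S32)(absSf % ringSize) + delta + ringSize;
   return ((U16)(idx % ringSize));
}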
7037 * @brief To get uplink subframe index associated with current PHICH
7042 * Function: rgSCHCmnGetPhichUlSfIdx
7043 * Purpose: Gets uplink subframe index associated with current PHICH
7044 * transmission based on SFN and subframe no
7046 * @param[in] CmLteTimingInfo *timeInfo
7047 * @param[in] RgSchCellCb *cell
7052 PUBLIC U8 rgSCHCmnGetPhichUlSfIdx
7054 CmLteTimingInfo *timeInfo,
7058 PUBLIC U8 rgSCHCmnGetPhichUlSfIdx(timeInfo, cell)
7059 CmLteTimingInfo *timeInfo;
7063 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
7065 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7071 TRC2(rgSCHCmnGetPhichUlSfIdx);
7073 dlsf = rgSCHUtlSubFrmGet(cell, *timeInfo);
7075 if(dlsf->phichOffInfo.sfnOffset == RGSCH_INVALID_INFO)
7077 RETVALUE(RGSCH_INVALID_INFO);
7079 subframe = dlsf->phichOffInfo.subframe;
7081 sfn = (RGSCH_MAX_SFN + timeInfo->sfn -
7082 dlsf->phichOffInfo.sfnOffset) % RGSCH_MAX_SFN;
7084    /* ccpu00130980: numUlSf(U16) parameter added to avoid an integer
7085     * wrap case, so that idx is computed correctly */
7086 numUlSf = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7087 numUlSf = ((numUlSf * sfn) + rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][subframe]) - 1;
7088 idx = numUlSf % (cellUl->numUlSubfrms);
7094 * @brief To get uplink subframe index.
7099 * Function: rgSCHCmnGetUlSfIdx
7100 * Purpose: Gets uplink subframe index based on SFN and subframe number.
7102 * @param[in] CmLteTimingInfo *timeInfo
7103  * @param[in]  RgSchCellCb     *cell
7108 PUBLIC U8 rgSCHCmnGetUlSfIdx
7110 CmLteTimingInfo *timeInfo,
7114 PUBLIC U8 rgSCHCmnGetUlSfIdx(timeInfo, cell)
7115 CmLteTimingInfo *timeInfo;
7119 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
7120 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7124 TRC2(rgSCHCmnGetUlSfIdx);
7126    /* ccpu00130980: numUlSf(U16) parameter added to avoid an integer
7127     * wrap case, so that idx is computed correctly */
7128 numUlSf = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7129 numUlSf = ((numUlSf * timeInfo->sfn) + \
7130 rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][timeInfo->subframe]) - 1;
7131 idx = numUlSf % (cellUl->numUlSubfrms);
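/* Sketch of the TDD computation above, assuming rgSchTddNumUlSubfrmTbl[cfg][sf]
 * holds the cumulative number of UL subframes up to and including subframe sf
 * (so [cfg][9] is the per-frame total). The table below is a hypothetical
 * entry for UL/DL configuration 1 (D S U U D D S U U D); it is shown only to
 * illustrate the arithmetic and is not the scheduler's table. */
PRIVATE U8 rgSchCmnExampleUlSfIdx(U16 sfn, U8 subframe, U8 numUlSubfrms)
{
   static const U8 cumUlSfCfg1[10] = {0, 0, 1, 2, 2, 2, 2, 3, 4, 4};
   U16 numUlSf;

   /* valid only when 'subframe' is actually an uplink subframe */
   numUlSf = (U16)((cumUlSfCfg1[9] * sfn) + cumUlSfCfg1[subframe] - 1);
   return ((U8)(numUlSf % numUlSubfrms));
}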
7139 * @brief To get uplink hq index.
7144 * Function: rgSCHCmnGetUlHqProcIdx
7145  *     Purpose:  Gets the uplink HARQ process index based on SFN and subframe number.
7147  * @param[in]  CmLteTimingInfo  *timeInfo
7148  * @param[in]  RgSchCellCb      *cell
7153 PUBLIC U8 rgSCHCmnGetUlHqProcIdx
7155 CmLteTimingInfo *timeInfo,
7159 PUBLIC U8 rgSCHCmnGetUlHqProcIdx(timeInfo, cell)
7160 CmLteTimingInfo *timeInfo;
7168 numUlSf = (timeInfo->sfn * RGSCH_NUM_SUB_FRAMES_5G + timeInfo->slot);
7169 procId = numUlSf % RGSCH_NUM_UL_HQ_PROC;
7171 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7172 /*ccpu00130639 - MOD - To get correct UL HARQ Proc IDs for all UL/DL Configs*/
7174 S8 sfnCycle = cell->tddHqSfnCycle;
7175 U8 numUlHarq = rgSchTddUlNumHarqProcTbl[ulDlCfgIdx]
7177 /* TRACE 5 Changes */
7178 TRC2(rgSCHCmnGetUlHqProcIdx);
7180 /* Calculate the number of UL SF in one SFN */
7181 numUlSfInSfn = RGSCH_NUM_SUB_FRAMES -
7182 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7184 /* Check for the SFN wrap around case */
7185 if(cell->crntTime.sfn == 1023 && timeInfo->sfn == 0)
7189 else if(cell->crntTime.sfn == 0 && timeInfo->sfn == 1023)
7191 /* sfnCycle decremented by 1 */
7192 sfnCycle = (sfnCycle + numUlHarq-1) % numUlHarq;
7194 /* Calculate the total number of UL sf */
7195 /* -1 is done since uplink sf are counted from 0 */
7196 numUlSf = numUlSfInSfn * (timeInfo->sfn + (sfnCycle*1024)) +
7197 rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][timeInfo->slot] - 1;
7199 procId = numUlSf % numUlHarq;
7205 /* UL_ALLOC_CHANGES */
7206 /***********************************************************
7208 * Func : rgSCHCmnUlFreeAlloc
7210 * Desc : Free an allocation - invokes UHM and releases
7211 * alloc for the scheduler
7212  *        Doesn't need the subframe as an argument
7220 **********************************************************/
7222 PUBLIC Void rgSCHCmnUlFreeAlloc
7228 PUBLIC Void rgSCHCmnUlFreeAlloc(cell, alloc)
7230 RgSchUlAlloc *alloc;
7233 RgSchUlHqProcCb *hqProc;
7234    TRC2(rgSCHCmnUlFreeAlloc);
7238 /* Fix : Release RNTI upon MSG3 max TX failure for non-HO UEs */
7239 if ((alloc->hqProc->remTx == 0) &&
7240 (alloc->hqProc->rcvdCrcInd == FALSE) &&
7243 RgSchRaCb *raCb = alloc->raCb;
7244 rgSCHUhmFreeProc(alloc->hqProc, cell);
7245 rgSCHUtlUlAllocRelease(alloc);
7246 rgSCHRamDelRaCb(cell, raCb, TRUE);
7251 hqProc = alloc->hqProc;
7252 rgSCHUtlUlAllocRelease(alloc);
7253 rgSCHUhmFreeProc(hqProc, cell);
7258 /***********************************************************
7260 * Func : rgSCHCmnUlFreeAllocation
7262 * Desc : Free an allocation - invokes UHM and releases
7263 * alloc for the scheduler
7271 **********************************************************/
7273 PUBLIC Void rgSCHCmnUlFreeAllocation
7280 PUBLIC Void rgSCHCmnUlFreeAllocation(cell, sf, alloc)
7283 RgSchUlAlloc *alloc;
7286 RgSchUlHqProcCb *hqProc;
7288 TRC2(rgSCHCmnUlFreeAllocation);
7292 /* Fix : Release RNTI upon MSG3 max TX failure for non-HO UEs */
7293 if ((alloc->hqProc->remTx == 0) &&
7294 (alloc->hqProc->rcvdCrcInd == FALSE) &&
7297 RgSchRaCb *raCb = alloc->raCb;
7298 rgSCHUhmFreeProc(alloc->hqProc, cell);
7299 rgSCHUtlUlAllocRls(sf, alloc);
7300 rgSCHRamDelRaCb(cell, raCb, TRUE);
7305 hqProc = alloc->hqProc;
7306 rgSCHUhmFreeProc(hqProc, cell);
7308 /* re-setting the PRB count while freeing the allocations */
7311 rgSCHUtlUlAllocRls(sf, alloc);
7317  * @brief This function implements PDCCH allocation for a UE
7318 * in the currently running subframe.
7322 * Function: rgSCHCmnPdcchAllocCrntSf
7323 * Purpose: This function determines current DL subframe
7324 * and UE DL CQI to call the actual pdcch allocator
7326 * Note that this function is called only
7327 * when PDCCH request needs to be made during
7328 * uplink scheduling.
7330 * Invoked by: Scheduler
7332 * @param[in] RgSchCellCb *cell
7333 * @param[in] RgSchUeCb *ue
7334 * @return RgSchPdcch *
7335 * -# NULLP when unsuccessful
7338 PUBLIC RgSchPdcch *rgSCHCmnPdcchAllocCrntSf
7344 PUBLIC RgSchPdcch *rgSCHCmnPdcchAllocCrntSf(cell, ue)
7349 CmLteTimingInfo frm = cell->crntTime;
7350 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7352 RgSchPdcch *pdcch = NULLP;
7354 TRC2(rgSCHCmnPdcchAllocCrntSf);
7355 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
7356 sf = rgSCHUtlSubFrmGet(cell, frm);
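   /* The PDCCH for an UL grant is taken from the DL subframe that lies
    * TFU_ULCNTRL_DLDELTA subframes ahead of the current time, matching the
    * advance applied to schdIdx in rgSCHCmnUpdVars. */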
7359 if (ue->allocCmnUlPdcch)
7361 pdcch = rgSCHCmnCmnPdcchAlloc(cell, sf);
7362 /* Since CRNTI Scrambled */
7365 pdcch->dciNumOfBits = ue->dciSize.cmnSize[TFU_DCI_FORMAT_0];
7371 //pdcch = rgSCHCmnPdcchAlloc(cell, ue, sf, y, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_0, FALSE);
7372 pdcch = rgSCHCmnPdcchAlloc(cell, ue, sf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_A1, FALSE);
7377 /***********************************************************
7379 * Func : rgSCHCmnUlAllocFillNdmrs
7381 * Desc : Determines and fills N_dmrs for a UE uplink
7386  *  Notes: N_dmrs determination is straightforward, as
7387  *         it is configured per subband
7391 **********************************************************/
7393 PUBLIC Void rgSCHCmnUlAllocFillNdmrs
7395 RgSchCmnUlCell *cellUl,
7399 PUBLIC Void rgSCHCmnUlAllocFillNdmrs(cellUl, alloc)
7400 RgSchCmnUlCell *cellUl;
7401 RgSchUlAlloc *alloc;
7404 TRC2(rgSCHCmnUlAllocFillNdmrs);
7405 alloc->grnt.nDmrs = cellUl->dmrsArr[alloc->sbStart];
7409 /***********************************************************
7411 * Func : rgSCHCmnUlAllocLnkHqProc
7413  *  Desc : Links a new allocation for a UE with the
7414 * appropriate HARQ process of the UE.
7422 **********************************************************/
7424 PUBLIC Void rgSCHCmnUlAllocLnkHqProc
7427 RgSchUlAlloc *alloc,
7428 RgSchUlHqProcCb *proc,
7432 PUBLIC Void rgSCHCmnUlAllocLnkHqProc(ue, alloc, proc, isRetx)
7434 RgSchUlAlloc *alloc;
7435 RgSchUlHqProcCb *proc;
7439 TRC2(rgSCHCmnUlAllocLnkHqProc);
7443 rgSCHCmnUlAdapRetx(alloc, proc);
7447 #ifdef LTE_L2_MEAS /* L2_COUNTERS */
7450 rgSCHUhmNewTx(proc, (((RgUeUlHqCb*)proc->hqEnt)->maxHqRetx), alloc);
7456 * @brief This function releases a PDCCH in the subframe that is
7457 * currently being allocated for.
7461 * Function: rgSCHCmnPdcchRlsCrntSf
7462 * Purpose: This function determines current DL subframe
7463 * which is considered for PDCCH allocation,
7464 * and then calls the actual function that
7465 * releases a PDCCH in a specific subframe.
7466 * Note that this function is called only
7467 * when PDCCH release needs to be made during
7468 * uplink scheduling.
7470 * Invoked by: Scheduler
7472 * @param[in] RgSchCellCb *cell
7473 * @param[in] RgSchPdcch *pdcch
7477 PUBLIC Void rgSCHCmnPdcchRlsCrntSf
7483 PUBLIC Void rgSCHCmnPdcchRlsCrntSf(cell, pdcch)
7488 CmLteTimingInfo frm = cell->crntTime;
7491 TRC2(rgSCHCmnPdcchRlsCrntSf);
7493 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
7494 sf = rgSCHUtlSubFrmGet(cell, frm);
7495 rgSCHUtlPdcchPut(cell, &sf->pdcchInfo, pdcch);
7498 /***********************************************************
7500 * Func : rgSCHCmnUlFillPdcchWithAlloc
7502  *  Desc : Fills a PDCCH with UL grant (DCI format A1/A2) information.
7510 **********************************************************/
7512 PUBLIC Void rgSCHCmnUlFillPdcchWithAlloc
7515 RgSchUlAlloc *alloc,
7519 PUBLIC Void rgSCHCmnUlFillPdcchWithAlloc(pdcch, alloc, ue)
7521 RgSchUlAlloc *alloc;
7526 TRC2(rgSCHCmnUlFillPdcchWithAlloc);
7529 pdcch->rnti = alloc->rnti;
7530 //pdcch->dci.dciFormat = TFU_DCI_FORMAT_A2;
7531 pdcch->dci.dciFormat = alloc->grnt.dciFrmt;
7533 //Currently hardcoding values here.
7534 //printf("Filling 5GTF UL DCI for rnti %d \n",alloc->rnti);
7535 switch(pdcch->dci.dciFormat)
7537 case TFU_DCI_FORMAT_A1:
7539 pdcch->dci.u.formatA1Info.formatType = 0;
7540 pdcch->dci.u.formatA1Info.xPUSCHRange = alloc->grnt.xPUSCHRange;
7541 pdcch->dci.u.formatA1Info.xPUSCH_TxTiming = 0;
7542 pdcch->dci.u.formatA1Info.RBAssign = alloc->grnt.rbAssign;
7543 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.hqProcId = alloc->grnt.hqProcId;
7544 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.mcs = alloc->grnt.iMcsCrnt;
7545 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.ndi = alloc->hqProc->ndi;
7546 pdcch->dci.u.formatA1Info.CSI_BSI_BRI_Req = 0;
7547 pdcch->dci.u.formatA1Info.CSIRS_BRRS_TxTiming = 0;
7548 pdcch->dci.u.formatA1Info.CSIRS_BRRS_SymbIdx = 0;
7549 pdcch->dci.u.formatA1Info.CSIRS_BRRS_ProcInd = 0;
7550 pdcch->dci.u.formatA1Info.numBSI_Reports = 0;
7551 pdcch->dci.u.formatA1Info.uciOnxPUSCH = alloc->grnt.uciOnxPUSCH;
7552 pdcch->dci.u.formatA1Info.beamSwitch = 0;
7553 pdcch->dci.u.formatA1Info.SRS_Config = 0;
7554 pdcch->dci.u.formatA1Info.SRS_Symbol = 0;
7555 pdcch->dci.u.formatA1Info.REMapIdx_DMRS_PCRS_numLayers = 0;
7556 pdcch->dci.u.formatA1Info.SCID = alloc->grnt.SCID;
7557 pdcch->dci.u.formatA1Info.PMI = alloc->grnt.PMI;
7558 pdcch->dci.u.formatA1Info.UL_PCRS = 0;
7559 pdcch->dci.u.formatA1Info.tpcCmd = alloc->grnt.tpc;
7562 case TFU_DCI_FORMAT_A2:
7564 pdcch->dci.u.formatA2Info.formatType = 1;
7565 pdcch->dci.u.formatA2Info.xPUSCHRange = alloc->grnt.xPUSCHRange;
7566 pdcch->dci.u.formatA2Info.xPUSCH_TxTiming = 0;
7567 pdcch->dci.u.formatA2Info.RBAssign = alloc->grnt.rbAssign;
7568 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.hqProcId = alloc->grnt.hqProcId;
7569 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.mcs = alloc->grnt.iMcsCrnt;
7570 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.ndi = alloc->hqProc->ndi;
7571 pdcch->dci.u.formatA2Info.CSI_BSI_BRI_Req = 0;
7572 pdcch->dci.u.formatA2Info.CSIRS_BRRS_TxTiming = 0;
7573 pdcch->dci.u.formatA2Info.CSIRS_BRRS_SymbIdx = 0;
7574 pdcch->dci.u.formatA2Info.CSIRS_BRRS_ProcInd = 0;
7575 pdcch->dci.u.formatA2Info.numBSI_Reports = 0;
7576 pdcch->dci.u.formatA2Info.uciOnxPUSCH = alloc->grnt.uciOnxPUSCH;
7577 pdcch->dci.u.formatA2Info.beamSwitch = 0;
7578 pdcch->dci.u.formatA2Info.SRS_Config = 0;
7579 pdcch->dci.u.formatA2Info.SRS_Symbol = 0;
7580 pdcch->dci.u.formatA2Info.REMapIdx_DMRS_PCRS_numLayers = 0;
7581 pdcch->dci.u.formatA2Info.SCID = alloc->grnt.SCID;
7582 pdcch->dci.u.formatA2Info.PMI = alloc->grnt.PMI;
7583 pdcch->dci.u.formatA2Info.UL_PCRS = 0;
7584 pdcch->dci.u.formatA2Info.tpcCmd = alloc->grnt.tpc;
7588          RLOG1(L_ERROR," 5GTF_ERROR UL Allocator's incorrect "
7589             "dciFormat Fill RNTI:%d",alloc->rnti);
7597 /***********************************************************
7599 * Func : rgSCHCmnUlAllocFillTpc
7601  *  Desc : Determines and fills TPC for a UE allocation.
7609 **********************************************************/
7611 PUBLIC Void rgSCHCmnUlAllocFillTpc
7618 PUBLIC Void rgSCHCmnUlAllocFillTpc(cell, ue, alloc)
7621 RgSchUlAlloc *alloc;
7624 TRC2(rgSCHCmnUlAllocFillTpc);
7625 alloc->grnt.tpc = rgSCHPwrPuschTpcForUe(cell, ue);
7630 /***********************************************************
7632 * Func : rgSCHCmnAddUeToRefreshQ
7634  *  Desc : Adds a UE to the refresh queue, so that the UE is
7635  *         periodically triggered to refresh its GBR and
7644 **********************************************************/
7646 PRIVATE Void rgSCHCmnAddUeToRefreshQ
7653 PRIVATE Void rgSCHCmnAddUeToRefreshQ(cell, ue, wait)
7659 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
7661 RgSchCmnUeInfo *ueSchd = RG_SCH_CMN_GET_CMN_UE(ue);
7663 TRC2(rgSCHCmnAddUeToRefreshQ);
7666 cmMemset((U8 *)&arg, 0, sizeof(arg));
7667 arg.tqCp = &sched->tmrTqCp;
7668 arg.tq = sched->tmrTq;
7669 arg.timers = &ueSchd->tmr;
7673 arg.evnt = RG_SCH_CMN_EVNT_UE_REFRESH;
7680 * @brief Perform UE reset procedure.
7684 * Function : rgSCHCmnUlUeReset
7686  *     This function performs BSR resetting and
7687  *     triggers the UL-specific scheduler
7688  *     to perform the UE reset procedure.
7690 * @param[in] RgSchCellCb *cell
7691 * @param[in] RgSchUeCb *ue
7695 PRIVATE Void rgSCHCmnUlUeReset
7701 PRIVATE Void rgSCHCmnUlUeReset(cell, ue)
7706 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7707 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
7709 RgSchCmnLcg *lcgCmn;
7711 RgSchCmnAllocRecord *allRcd;
7712 TRC2(rgSCHCmnUlUeReset);
7714 ue->ul.minReqBytes = 0;
7715 ue->ul.totalBsr = 0;
7717 ue->ul.nonGbrLcgBs = 0;
7718 ue->ul.effAmbr = ue->ul.cfgdAmbr;
7720 node = ueUl->ulAllocLst.first;
7723 allRcd = (RgSchCmnAllocRecord *)node->node;
7727 for(lcgCnt = 0; lcgCnt < RGSCH_MAX_LCG_PER_UE; lcgCnt++)
7729 lcgCmn = RG_SCH_CMN_GET_UL_LCG(&ue->ul.lcgArr[lcgCnt]);
7731 lcgCmn->reportedBs = 0;
7732 lcgCmn->effGbr = lcgCmn->cfgdGbr;
7733 lcgCmn->effDeltaMbr = lcgCmn->deltaMbr;
7735 rgSCHCmnUlUeDelAllocs(cell, ue);
7737 ue->isSrGrant = FALSE;
7739 cellSchd->apisUl->rgSCHUlUeReset(cell, ue);
7741 /* Stack Crash problem for TRACE5 changes. Added the return below */
7747 * @brief RESET UL CQI and DL CQI&RI to conservative values
7748 * for a reestablishing UE.
7752 * Function : rgSCHCmnResetRiCqi
7754 * RESET UL CQI and DL CQI&RI to conservative values
7755 * for a reestablishing UE
7757 * @param[in] RgSchCellCb *cell
7758 * @param[in] RgSchUeCb *ue
7762 PRIVATE Void rgSCHCmnResetRiCqi
7768 PRIVATE Void rgSCHCmnResetRiCqi(cell, ue)
7773 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7774 RgSchCmnUe *ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
7775 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7776 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
7778 TRC2(rgSCHCmnResetRiCqi);
7780 rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd,
7781 cell->isCpUlExtend);
7783 ueDl->mimoInfo.cwInfo[0].cqi = cellSchd->dl.ccchCqi;
7784 ueDl->mimoInfo.cwInfo[1].cqi = cellSchd->dl.ccchCqi;
7785 ueDl->mimoInfo.ri = 1;
7786 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) ||
7787 (ue->mimoInfo.txMode == RGR_UE_TM_6))
7789 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
7791 if (ue->mimoInfo.txMode == RGR_UE_TM_3)
7793 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
7796 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, ue->isEmtcUe);
7798 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, FALSE);
7802 /* Request for an early Aper CQI in case of reest */
7803 RgSchUeACqiCb *acqiCb = RG_SCH_CMN_GET_ACQICB(ue,cell);
7804 if(acqiCb && acqiCb->aCqiCfg.pres)
7806 acqiCb->aCqiTrigWt = 0;
7814 * @brief Perform UE reset procedure.
7818 * Function : rgSCHCmnDlUeReset
7820  *     This function performs BO resetting and
7821  *     triggers the DL-specific scheduler
7822  *     to perform the UE reset procedure.
7824 * @param[in] RgSchCellCb *cell
7825 * @param[in] RgSchUeCb *ue
7829 PRIVATE Void rgSCHCmnDlUeReset
7835 PRIVATE Void rgSCHCmnDlUeReset(cell, ue)
7840 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7841 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
7842 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7844 TRC2(rgSCHCmnDlUeReset);
7846 if (ueDl->rachInfo.poLnk.node != NULLP)
7848 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
7851 /* Fix: syed Remove from TA List if this UE is there.
7852 * If TA Timer is running. Stop it */
7853 if (ue->dlTaLnk.node)
7855 cmLListDelFrm(&cellCmnDl->taLst, &ue->dlTaLnk);
7856 ue->dlTaLnk.node = (PTR)NULLP;
7858 else if (ue->taTmr.tmrEvnt != TMR_NONE)
7860 rgSCHTmrStopTmr(cell, ue->taTmr.tmrEvnt, ue);
7863 cellSchd->apisDl->rgSCHDlUeReset(cell, ue);
7867 rgSCHSCellDlUeReset(cell,ue);
7873 * @brief Perform UE reset procedure.
7877 * Function : rgSCHCmnUeReset
7879  *     This function triggers the specific scheduler
7880  *     to perform the UE reset procedure.
7882 * @param[in] RgSchCellCb *cell
7883 * @param[in] RgSchUeCb *ue
7889 PUBLIC Void rgSCHCmnUeReset
7895 PUBLIC Void rgSCHCmnUeReset(cell, ue)
7902 RgInfResetHqEnt hqEntRstInfo;
7904 TRC2(rgSCHCmnUeReset);
7905 /* RACHO: remove UE from pdcch, handover and rapId assoc Qs */
7906 rgSCHCmnDelRachInfo(cell, ue);
7908 rgSCHPwrUeReset(cell, ue);
7910 rgSCHCmnUlUeReset(cell, ue);
7911 rgSCHCmnDlUeReset(cell, ue);
7914    /* Set allocCmnUlPdcch to TRUE to allocate DCI0/1A from the common search space.
7915       Because multiple cells are added, a 2-bit CqiReq field is present.
7916       This flag will be set to FALSE once SCell READY is received. */
7917 ue->allocCmnUlPdcch = TRUE;
7920 /* Fix : syed RESET UL CQI and DL CQI&RI to conservative values
7921 * for a reestablishing UE */
7922 /*Reset Cqi Config for all the configured cells*/
7923 for (idx = 0;idx < CM_LTE_MAX_CELLS; idx++)
7925 if (ue->cellInfo[idx] != NULLP)
7927 rgSCHCmnResetRiCqi(ue->cellInfo[idx]->cell, ue);
7930 /*After Reset Trigger APCQI for Pcell*/
7931 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
7932 if(pCellInfo->acqiCb.aCqiCfg.pres)
7934 ue->dl.reqForCqi = RG_SCH_APCQI_SERVING_CC;
7937 /* sending HqEnt reset to MAC */
7938 hqEntRstInfo.cellId = cell->cellId;
7939 hqEntRstInfo.crnti = ue->ueId;
7941 rgSCHUtlGetPstToLyr(&pst, &rgSchCb[cell->instIdx], cell->macInst);
7942 RgSchMacRstHqEnt(&pst,&hqEntRstInfo);
7948 * @brief UE out of MeasGap or AckNackReptn.
7952 * Function : rgSCHCmnActvtUlUe
7954  *     This function triggers the specific scheduler
7955 * to start considering it for scheduling.
7957 * @param[in] RgSchCellCb *cell
7958 * @param[in] RgSchUeCb *ue
7964 PUBLIC Void rgSCHCmnActvtUlUe
7970 PUBLIC Void rgSCHCmnActvtUlUe(cell, ue)
7975 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7976 TRC2(rgSCHCmnActvtUlUe);
7978 /* : take care of this in UL retransmission */
7979 cellSchd->apisUl->rgSCHUlActvtUe(cell, ue);
7984 * @brief UE out of MeasGap or AckNackReptn.
7988 * Function : rgSCHCmnActvtDlUe
7990  *     This function triggers the specific scheduler
7991 * to start considering it for scheduling.
7993 * @param[in] RgSchCellCb *cell
7994 * @param[in] RgSchUeCb *ue
8000 PUBLIC Void rgSCHCmnActvtDlUe
8006 PUBLIC Void rgSCHCmnActvtDlUe(cell, ue)
8011 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8012 TRC2(rgSCHCmnActvtDlUe);
8014 cellSchd->apisDl->rgSCHDlActvtUe(cell, ue);
8019  * @brief This API is invoked to report a CRC indication to the scheduler.
8023  *     Function : rgSCHCmnHdlUlTransInd
8024  *     This API is invoked to report a CRC indication to the scheduler.
8026 * @param[in] RgSchCellCb *cell
8027 * @param[in] RgSchUeCb *ue
8028 * @param[in] CmLteTimingInfo timingInfo
8033 PUBLIC Void rgSCHCmnHdlUlTransInd
8037 CmLteTimingInfo timingInfo
8040 PUBLIC Void rgSCHCmnHdlUlTransInd(cell, ue, timingInfo)
8043 CmLteTimingInfo timingInfo;
8046 TRC2(rgSCHCmnHdlUlTransInd);
8048    /* Update the latest UL data/signal transmission time */
8049 RGSCHCPYTIMEINFO(timingInfo, ue->ul.ulTransTime);
8050 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
8052 /* Some UL Transmission from this UE.
8053 * Activate this UE if it was inactive */
8054 RG_SCH_CMN_DL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
8055 RG_SCH_CMN_UL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
8063 * @brief Compute the minimum Rank based on Codebook subset
8064 * restriction configuration for 4 Tx Ports and Tx Mode 4.
8068 * Function : rgSCHCmnComp4TxMode4
8070 * Depending on BitMap set at CBSR during Configuration
8071 * - return the least possible Rank
8074 * @param[in] U32 *pmiBitMap
8075 * @return RgSchCmnRank
8078 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4
8083 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4(pmiBitMap)
8087 U32 bitMap0, bitMap1;
8088 TRC2(rgSCHCmnComp4TxMode4);
8089 bitMap0 = pmiBitMap[0];
8090 bitMap1 = pmiBitMap[1];
8091 if((bitMap1) & 0xFFFF)
8093 RETVALUE (RG_SCH_CMN_RANK_1);
8095 else if((bitMap1>>16) & 0xFFFF)
8097 RETVALUE (RG_SCH_CMN_RANK_2);
8099 else if((bitMap0) & 0xFFFF)
8101 RETVALUE (RG_SCH_CMN_RANK_3);
8103 else if((bitMap0>>16) & 0xFFFF)
8105 RETVALUE (RG_SCH_CMN_RANK_4);
8109 RETVALUE (RG_SCH_CMN_RANK_1);
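   /* Added explanatory note: the two 32-bit CBSR words are examined in 16-bit
    * groups from the lowest rank upwards, and the first group containing any set
    * bit decides the minimum rank. Illustrative example: with pmiBitMap[1] ==
    * 0x00030000 and pmiBitMap[0] == 0, (bitMap1 & 0xFFFF) is 0 while
    * ((bitMap1 >> 16) & 0xFFFF) is 0x3, so RG_SCH_CMN_RANK_2 is returned. */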
8115 * @brief Compute the minimum Rank based on Codebook subset
8116 * restriction configuration for 2 Tx Ports and Tx Mode 4.
8120 * Function : rgSCHCmnComp2TxMode4
8122 * Depending on BitMap set at CBSR during Configuration
8123 * - return the least possible Rank
8126 * @param[in] U32 *pmiBitMap
8127 * @return RgSchCmnRank
8130 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4
8135 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4(pmiBitMap)
8140 TRC2(rgSCHCmnComp2TxMode4);
8141 bitMap0 = pmiBitMap[0];
8142 if((bitMap0>>26)& 0x0F)
8144 RETVALUE (RG_SCH_CMN_RANK_1);
8146 else if((bitMap0>>30) & 3)
8148 RETVALUE (RG_SCH_CMN_RANK_2);
8152 RETVALUE (RG_SCH_CMN_RANK_1);
8157 * @brief Compute the minimum Rank based on Codebook subset
8158 * restriction configuration for 4 Tx Ports and Tx Mode 3.
8162 * Function : rgSCHCmnComp4TxMode3
8164 * Depending on BitMap set at CBSR during Configuration
8165 * - return the least possible Rank
8168 * @param[in] U32 *pmiBitMap
8169 * @return RgSchCmnRank
8172 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3
8177 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3(pmiBitMap)
8182 TRC2(rgSCHCmnComp4TxMode3);
8183 bitMap0 = pmiBitMap[0];
8184 if((bitMap0>>28)& 1)
8186 RETVALUE (RG_SCH_CMN_RANK_1);
8188 else if((bitMap0>>29) &1)
8190 RETVALUE (RG_SCH_CMN_RANK_2);
8192 else if((bitMap0>>30) &1)
8194 RETVALUE (RG_SCH_CMN_RANK_3);
8196 else if((bitMap0>>31) &1)
8198 RETVALUE (RG_SCH_CMN_RANK_4);
8202 RETVALUE (RG_SCH_CMN_RANK_1);
8207 * @brief Compute the minimum Rank based on Codebook subset
8208 * restriction configuration for 2 Tx Ports and Tx Mode 3.
8212 * Function : rgSCHCmnComp2TxMode3
8214 * Depending on BitMap set at CBSR during Configuration
8215 * - return the least possible Rank
8218 * @param[in] U32 *pmiBitMap
8219 * @return RgSchCmnRank
8222 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3
8227 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3(pmiBitMap)
8232 TRC2(rgSCHCmnComp2TxMode3);
8233 bitMap0 = pmiBitMap[0];
8234 if((bitMap0>>30)& 1)
8236 RETVALUE (RG_SCH_CMN_RANK_1);
8238 else if((bitMap0>>31) &1)
8240 RETVALUE (RG_SCH_CMN_RANK_2);
8244 RETVALUE (RG_SCH_CMN_RANK_1);
8249 * @brief Compute the minimum Rank based on Codebook subset
8250 * restriction configuration.
8254 * Function : rgSCHCmnComputeRank
8256 * Depending on Num Tx Ports and Transmission mode
8257 * - return the least possible Rank
8260 * @param[in] RgrTxMode txMode
8261 * @param[in] U32 *pmiBitMap
8262 * @param[in] U8 numTxPorts
8263 * @return RgSchCmnRank
8266 PRIVATE RgSchCmnRank rgSCHCmnComputeRank
8273 PRIVATE RgSchCmnRank rgSCHCmnComputeRank(txMode, pmiBitMap, numTxPorts)
8279 TRC2(rgSCHCmnComputeRank);
8281 if (numTxPorts ==2 && txMode == RGR_UE_TM_3)
8283 RETVALUE (rgSCHCmnComp2TxMode3(pmiBitMap));
8285 else if (numTxPorts ==4 && txMode == RGR_UE_TM_3)
8287 RETVALUE (rgSCHCmnComp4TxMode3(pmiBitMap));
8289 else if (numTxPorts ==2 && txMode == RGR_UE_TM_4)
8291 RETVALUE (rgSCHCmnComp2TxMode4(pmiBitMap));
8293 else if (numTxPorts ==4 && txMode == RGR_UE_TM_4)
8295 RETVALUE (rgSCHCmnComp4TxMode4(pmiBitMap));
8299 RETVALUE (RG_SCH_CMN_RANK_1);
8306 * @brief Harq Entity Deinitialization for CMN SCH.
8310 * Function : rgSCHCmnDlDeInitHqEnt
8312 * Harq Entity Deinitialization for CMN SCH
8314 * @param[in] RgSchCellCb *cell
8315 * @param[in] RgSchDlHqEnt *hqE
8318 /*KWORK_FIX:Changed function return type to void */
8320 PUBLIC Void rgSCHCmnDlDeInitHqEnt
8326 PUBLIC Void rgSCHCmnDlDeInitHqEnt(cell, hqE)
8331 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8332 RgSchDlHqProcCb *hqP;
8336 TRC2(rgSCHCmnDlDeInitHqEnt);
8338 ret = cellSchd->apisDl->rgSCHDlUeHqEntDeInit(cell, hqE);
8339 /* Free only if the HARQ procs were created */
8344 for(cnt = 0; cnt < hqE->numHqPrcs; cnt++)
8346 hqP = &hqE->procs[cnt];
8347 if ((RG_SCH_CMN_GET_DL_HQP(hqP)))
8349 rgSCHUtlFreeSBuf(cell->instIdx,
8350 (Data**)(&(hqP->sch)), (sizeof(RgSchCmnDlHqProc)));
8354 rgSCHLaaDeInitDlHqProcCb (cell, hqE);
8361 * @brief Harq Entity initialization for CMN SCH.
8365 * Function : rgSCHCmnDlInitHqEnt
8367 * Harq Entity initialization for CMN SCH
8369 * @param[in] RgSchCellCb *cell
8370 * @param[in] RgSchUeCb *ue
8376 PUBLIC S16 rgSCHCmnDlInitHqEnt
8382 PUBLIC S16 rgSCHCmnDlInitHqEnt(cell, hqEnt)
8384 RgSchDlHqEnt *hqEnt;
8388 RgSchDlHqProcCb *hqP;
8391 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8392 TRC2(rgSCHCmnDlInitHqEnt);
8394 for(cnt = 0; cnt < hqEnt->numHqPrcs; cnt++)
8396 hqP = &hqEnt->procs[cnt];
8397 if (rgSCHUtlAllocSBuf(cell->instIdx,
8398 (Data**)&(hqP->sch), (sizeof(RgSchCmnDlHqProc))) != ROK)
8404 if((cell->emtcEnable) &&(hqEnt->ue->isEmtcUe))
8406 if(ROK != cellSchd->apisEmtcDl->rgSCHDlUeHqEntInit(cell, hqEnt))
8415 if(ROK != cellSchd->apisDl->rgSCHDlUeHqEntInit(cell, hqEnt))
8422 } /* rgSCHCmnDlInitHqEnt */
8425 * @brief This function computes distribution of refresh period
8429 * Function: rgSCHCmnGetRefreshDist
8430 * Purpose: This function computes the refresh-offset distribution.
8431 * This is required to spread the refresh of different sets of UEs
8432 * across consecutive subframes.
8434 * Invoked by: rgSCHCmnGetRefreshPer
8436 * @param[in] RgSchCellCb *cell
8437 * @param[in] RgSchUeCb *ue
8442 PRIVATE U8 rgSCHCmnGetRefreshDist
8448 PRIVATE U8 rgSCHCmnGetRefreshDist(cell, ue)
8455 Inst inst = cell->instIdx;
8457 TRC2(rgSCHCmnGetRefreshDist);
8459 for(refOffst = 0; refOffst < RGSCH_MAX_REFRESH_OFFSET; refOffst++)
8461 if(cell->refreshUeCnt[refOffst] < RGSCH_MAX_REFRESH_GRPSZ)
8463 cell->refreshUeCnt[refOffst]++;
8464 ue->refreshOffset = refOffst;
8465 /* printf("UE[%d] refresh offset[%d]. Cell refresh ue count[%d].\n", ue->ueId, refOffst, cell->refreshUeCnt[refOffst]); */
8470 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Allocation of refresh distribution failed\n"));
8471 /* We should not enter here normally, but in case of failure, allocate from the last offset */
8472 cell->refreshUeCnt[refOffst-1]++;
8473 ue->refreshOffset = refOffst-1;
8475 RETVALUE(refOffst-1);
8478 * @brief This function computes initial Refresh Wait Period.
8482 * Function: rgSCHCmnGetRefreshPer
8483 * Purpose: This function computes initial Refresh Wait Period.
8484 * This is required to align multiple UEs' refresh
8485 * around the same time.
8487 * Invoked by: rgSCHCmnRgrUeCfg, rgSCHCmnRgrUeRecfg
8489 * @param[in] RgSchCellCb *cell
8490 * @param[in] RgSchUeCb *ue
8491 * @param[in] U32 *waitPer
8496 PRIVATE Void rgSCHCmnGetRefreshPer
8503 PRIVATE Void rgSCHCmnGetRefreshPer(cell, ue, waitPer)
8512 TRC2(rgSCHCmnGetRefreshPer);
8514 refreshPer = RG_SCH_CMN_REFRESH_TIME * RG_SCH_CMN_REFRESH_TIMERES;
8515 crntSubFrm = cell->crntTime.sfn * RGSCH_NUM_SUB_FRAMES_5G + cell->crntTime.slot;
8516 /* Fix: syed align multiple UEs to refresh at same time */
8517 *waitPer = refreshPer - (crntSubFrm % refreshPer);
8518 *waitPer = RGSCH_CEIL(*waitPer, RG_SCH_CMN_REFRESH_TIMERES);
8519 *waitPer = *waitPer + rgSCHCmnGetRefreshDist(cell, ue);
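   /* Added note (numbers are illustrative): if the refresh period
    * RG_SCH_CMN_REFRESH_TIME * RG_SCH_CMN_REFRESH_TIMERES worked out to 3000
    * subframes and crntSubFrm were 12345, then refreshPer - (crntSubFrm % refreshPer)
    * = 3000 - 345 = 2655 subframes; the RGSCH_CEIL by RG_SCH_CMN_REFRESH_TIMERES
    * converts this back to timer-resolution units, and rgSCHCmnGetRefreshDist()
    * adds a small per-UE offset so that UEs due at the same instant are staggered
    * across consecutive offsets. */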
8527 * @brief UE initialisation for scheduler.
8531 * Function : rgSCHCmnRgrSCellUeCfg
8533 * This function initialises UE-specific scheduler
8534 * information for the SCell:
8535 * 0. Perform basic validations
8536 * 1. Allocate common sched UE cntrl blk
8537 * 2. Perform DL cfg (allocate Hq Procs Cmn sched cntrl blks)
 * 3. Perform UL cfg
8539 * 4. Perform DLFS cfg
8541 * @param[in] RgSchCellCb *cell
8542 * @param[in] RgSchUeCb *ue
8543 * @param[out] RgSchErrInfo *err
8549 PUBLIC S16 rgSCHCmnRgrSCellUeCfg
8553 RgrUeSecCellCfg *sCellInfoCfg,
8557 PUBLIC S16 rgSCHCmnRgrSCellUeCfg(sCell, ue, sCellInfoCfg, err)
8560 RgrUeSecCellCfg *sCellInfoCfg;
8567 RgSchCmnAllocRecord *allRcd;
8568 RgSchDlRbAlloc *allocInfo;
8569 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(ue->cell);
8571 RgSchCmnUlUe *ueUlPcell;
8572 RgSchCmnUe *pCellUeSchCmn;
8573 RgSchCmnUe *ueSchCmn;
8575 RgSchCmnDlUe *pCellUeDl;
8577 Inst inst = ue->cell->instIdx;
8579 U32 idx = (U8)((sCell->cellId - rgSchCb[sCell->instIdx].genCfg.startCellId)&(CM_LTE_MAX_CELLS-1));
8580 TRC2(rgSCHCmnRgrSCellUeCfg);
8582 pCellUeSchCmn = RG_SCH_CMN_GET_UE(ue,ue->cell);
8583 pCellUeDl = &pCellUeSchCmn->dl;
8585 /* 1. Allocate Common sched control block */
8586 if((rgSCHUtlAllocSBuf(sCell->instIdx,
8587 (Data**)&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch)), (sizeof(RgSchCmnUe))) != ROK))
8589 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Memory allocation FAILED\n"));
8590 err->errCause = RGSCHERR_SCH_CFG;
8593 ueSchCmn = RG_SCH_CMN_GET_UE(ue,sCell);
8595 /*2. Perform UEs downlink configuration */
8596 ueDl = &ueSchCmn->dl;
8599 ueDl->mimoInfo = pCellUeDl->mimoInfo;
8601 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) ||
8602 (ue->mimoInfo.txMode == RGR_UE_TM_6))
8604 RG_SCH_CMN_SET_FORCE_TD(ue, sCell, RG_SCH_CMN_TD_NO_PMI);
8606 if (ue->mimoInfo.txMode == RGR_UE_TM_3)
8608 RG_SCH_CMN_SET_FORCE_TD(ue, sCell, RG_SCH_CMN_TD_RI_1);
8610 RGSCH_ARRAY_BOUND_CHECK(sCell->instIdx, rgUeCatTbl, pCellUeSchCmn->cmn.ueCat);
8611 ueDl->maxTbBits = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlTbBits;
8614 ri = RGSCH_MIN(ri, sCell->numTxAntPorts);
8615 if(((CM_LTE_UE_CAT_6 == pCellUeSchCmn->cmn.ueCat )
8616 ||(CM_LTE_UE_CAT_7 == pCellUeSchCmn->cmn.ueCat))
8619 ueDl->maxTbSz = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlBits[1];
8623 ueDl->maxTbSz = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlBits[0];
8626 /* Fix : syed Assign hqEnt to UE only if msg4 is done */
8628 ueDl->maxSbSz = (rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxSftChBits/
8629 rgSchTddDlNumHarqProcTbl[sCell->ulDlCfgIdx]);
8631 ueDl->maxSbSz = (rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxSftChBits/
8632 RGSCH_NUM_DL_HQ_PROC);
8635 rgSCHCmnDlSetUeAllocLmt(sCell, ueDl, ue->isEmtcUe);
8637 rgSCHCmnDlSetUeAllocLmt(sCell, ueDl, FALSE);
8641 /* ambrCfgd config moved to ueCb.dl, as it's not needed for per cell wise*/
8643 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, sCell);
8644 allocInfo->rnti = ue->ueId;
8646 /* Initializing the lastCfi value to current cfi value */
8647 ueDl->lastCfi = cellSchd->dl.currCfi;
8649 if ((cellSchd->apisDl->rgSCHRgrSCellDlUeCfg(sCell, ue, err)) != ROK)
8651 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Spec Sched DL UE CFG FAILED\n"));
8655 /* TODO: enhance for DLFS RB Allocation for SCELLs in future dev */
8657 /* DLFS UE Config */
8658 if (cellSchd->dl.isDlFreqSel)
8660 if ((cellSchd->apisDlfs->rgSCHDlfsSCellUeCfg(sCell, ue, sCellInfoCfg, err)) != ROK)
8662 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "DLFS UE config FAILED\n"));
8667 /* TODO: Do UL SCELL CFG during UL CA dev */
8669 ueUl = RG_SCH_CMN_GET_UL_UE(ue, sCell);
8671 /* TODO_ULCA: SRS for SCELL needs to be handled in the below function call */
8672 rgSCHCmnUpdUeUlCqiInfo(sCell, ue, ueUl, ueSchCmn, cellSchd,
8673 sCell->isCpUlExtend);
8675 ret = rgSCHUhmHqEntInit(sCell, ue);
8678 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId,"SCELL UHM HARQ Ent Init "
8679 "Failed for CRNTI:%d", ue->ueId);
8683 ueUlPcell = RG_SCH_CMN_GET_UL_UE(ue, ue->cell);
8684 /* Initialize uplink HARQ related information for UE */
8685 ueUl->hqEnt.maxHqRetx = ueUlPcell->hqEnt.maxHqRetx;
8686 cmLListInit(&ueUl->hqEnt.free);
8687 cmLListInit(&ueUl->hqEnt.inUse);
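   /* Added note: the UL HARQ processes of this UE form a simple pool -- every
    * process is placed on the 'free' list below and is expected to move to the
    * 'inUse' list when the UL HARQ module allocates it for a transmission
    * (the list transitions themselves happen elsewhere). */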
8688 for(i=0; i < ueUl->hqEnt.numHqPrcs; i++)
8690 ueUl->hqEnt.hqProcCb[i].hqEnt = (void*)(&ueUl->hqEnt);
8691 ueUl->hqEnt.hqProcCb[i].procId = i;
8692 ueUl->hqEnt.hqProcCb[i].ulSfIdx = RGSCH_INVALID_INFO;
8693 ueUl->hqEnt.hqProcCb[i].alloc = NULLP;
8695 /* ccpu00139513- Initializing SPS flags*/
8696 ueUl->hqEnt.hqProcCb[i].isSpsActvnHqP = FALSE;
8697 ueUl->hqEnt.hqProcCb[i].isSpsOccnHqP = FALSE;
8699 cmLListAdd2Tail(&ueUl->hqEnt.free, &ueUl->hqEnt.hqProcCb[i].lnk);
8700 ueUl->hqEnt.hqProcCb[i].lnk.node = (PTR)&ueUl->hqEnt.hqProcCb[i];
8703 /* Allocate UL BSR allocation tracking List */
8704 cmLListInit(&ueUl->ulAllocLst);
8706 for (cnt = 0; cnt < RG_SCH_CMN_MAX_ALLOC_TRACK; cnt++)
8708 if((rgSCHUtlAllocSBuf(sCell->instIdx,
8709 (Data**)&(allRcd),sizeof(RgSchCmnAllocRecord)) != ROK))
8711 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId,"SCELL Memory allocation FAILED"
8712 "for CRNTI:%d",ue->ueId);
8713 err->errCause = RGSCHERR_SCH_CFG;
8716 allRcd->allocTime = sCell->crntTime;
8717 cmLListAdd2Tail(&ueUl->ulAllocLst, &allRcd->lnk);
8718 allRcd->lnk.node = (PTR)allRcd;
8721 /* After initialising UL part, do power related init */
8722 ret = rgSCHPwrUeSCellCfg(sCell, ue, sCellInfoCfg);
8725 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Could not do "
8726 "power config for UE CRNTI:%d",ue->ueId);
8731 if(TRUE == ue->isEmtcUe)
8733 if ((cellSchd->apisEmtcUl->rgSCHRgrUlUeCfg(sCell, ue, NULL, err)) != ROK)
8735 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Spec Sched UL UE CFG FAILED"
8736 "for CRNTI:%d",ue->ueId);
8743 if ((cellSchd->apisUl->rgSCHRgrUlUeCfg(sCell, ue, NULL, err)) != ROK)
8745 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Spec Sched UL UE CFG FAILED"
8746 "for CRNTI:%d",ue->ueId);
8751 ue->ul.isUlCaEnabled = TRUE;
8755 } /* rgSCHCmnRgrSCellUeCfg */
8759 * @brief SCell UE deletion for scheduler.
8763 * Function : rgSCHCmnRgrSCellUeDel
8765 * This function deletes UE-specific scheduler
8766 * information for the SCell
8768 * @param[in] RgSchCellCb *cell
8769 * @param[in] RgSchUeCb *ue
8775 PUBLIC S16 rgSCHCmnRgrSCellUeDel
8777 RgSchUeCellInfo *sCellInfo,
8781 PUBLIC S16 rgSCHCmnRgrSCellUeDel(sCellInfo, ue)
8782 RgSchUeCellInfo *sCellInfo;
8786 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(ue->cell);
8787 Inst inst = ue->cell->instIdx;
8789 TRC2(rgSCHCmnRgrSCellUeDel);
8791 cellSchd->apisDl->rgSCHRgrSCellDlUeDel(sCellInfo, ue);
8794 rgSCHCmnUlUeDelAllocs(sCellInfo->cell, ue);
8797 if(TRUE == ue->isEmtcUe)
8799 cellSchd->apisEmtcUl->rgSCHFreeUlUe(sCellInfo->cell, ue);
8804 cellSchd->apisUl->rgSCHFreeUlUe(sCellInfo->cell, ue);
8807 /* DLFS UE Config */
8808 if (cellSchd->dl.isDlFreqSel)
8810 if ((cellSchd->apisDlfs->rgSCHDlfsSCellUeDel(sCellInfo->cell, ue)) != ROK)
8812 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "DLFS Scell del FAILED\n"));
8817 rgSCHUtlFreeSBuf(sCellInfo->cell->instIdx,
8818 (Data**)(&(sCellInfo->sch)), (sizeof(RgSchCmnUe)));
8822 } /* rgSCHCmnRgrSCellUeDel */
8828 * @brief Handles 5gtf configuration for a UE
8832 * Function : rgSCHCmn5gtfUeCfg
8838 * @param[in] RgSchCellCb *cell
8839 * @param[in] RgSchUeCb *ue
8840 * @param[in] RgrUeCfg *cfg
8846 PUBLIC S16 rgSCHCmn5gtfUeCfg
8853 PUBLIC S16 rgSCHCmn5gtfUeCfg(cell, ue, cfg)
8859 TRC2(rgSCHCmn5gtfUeCfg);
8861 RgSchUeGrp *ue5gtfGrp;
8862 ue->ue5gtfCb.grpId = cfg->ue5gtfCfg.grpId;
8863 ue->ue5gtfCb.BeamId = cfg->ue5gtfCfg.BeamId;
8864 ue->ue5gtfCb.numCC = cfg->ue5gtfCfg.numCC;
8865 ue->ue5gtfCb.mcs = cfg->ue5gtfCfg.mcs;
8866 ue->ue5gtfCb.maxPrb = cfg->ue5gtfCfg.maxPrb;
8868 ue->ue5gtfCb.cqiRiPer = 100;
8869 /* 5gtf TODO: CQIs to start from (10,0)*/
8870 ue->ue5gtfCb.nxtCqiRiOccn.sfn = 10;
8871 ue->ue5gtfCb.nxtCqiRiOccn.slot = 0;
8872 ue->ue5gtfCb.rank = 1;
8874 printf("\nschd cfg at mac,%u,%u,%u,%u,%u\n",ue->ue5gtfCb.grpId,ue->ue5gtfCb.BeamId,ue->ue5gtfCb.numCC,
8875 ue->ue5gtfCb.mcs,ue->ue5gtfCb.maxPrb);
8877 ue5gtfGrp = &(cell->cell5gtfCb.ueGrp5gConf[ue->ue5gtfCb.BeamId]);
8879 /* TODO_5GTF: Currently handling 1 group only. Need to update when multi group
8880 scheduling comes into picture */
8881 if(ue5gtfGrp->beamBitMask & (1 << ue->ue5gtfCb.BeamId))
8883 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
8884 "5GTF_ERROR Invalid beam id CRNTI:%d",cfg->crnti);
8887 ue5gtfGrp->beamBitMask |= (1 << ue->ue5gtfCb.BeamId);
8894 * @brief UE initialisation for scheduler.
8898 * Function : rgSCHCmnRgrUeCfg
8900 * This function initialises UE-specific scheduler information:
8902 * 0. Perform basic validations
8903 * 1. Allocate common sched UE cntrl blk
8904 * 2. Perform DL cfg (allocate Hq Procs Cmn sched cntrl blks)
 * 3. Perform UL cfg
8906 * 4. Perform DLFS cfg
8908 * @param[in] RgSchCellCb *cell
8909 * @param[in] RgSchUeCb *ue
8910 * @param[int] RgrUeCfg *ueCfg
8911 * @param[out] RgSchErrInfo *err
8917 PUBLIC S16 rgSCHCmnRgrUeCfg
8925 PUBLIC S16 rgSCHCmnRgrUeCfg(cell, ue, ueCfg, err)
8932 RgSchDlRbAlloc *allocInfo;
8934 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8935 RgSchCmnUe *ueSchCmn;
8939 RgSchCmnAllocRecord *allRcd;
8941 U32 idx = (U8)((cell->cellId - rgSchCb[cell->instIdx].genCfg.startCellId)&(CM_LTE_MAX_CELLS-1));
8942 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
8943 TRC2(rgSCHCmnRgrUeCfg);
8946 /* 1. Allocate Common sched control block */
8947 if((rgSCHUtlAllocSBuf(cell->instIdx,
8948 (Data**)&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch)), (sizeof(RgSchCmnUe))) != ROK))
8950 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
8951 "Memory allocation FAILED for CRNTI:%d",ueCfg->crnti);
8952 err->errCause = RGSCHERR_SCH_CFG;
8955 ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
8956 ue->dl.ueDlCqiCfg = ueCfg->ueDlCqiCfg;
8957 pCellInfo->acqiCb.aCqiCfg = ueCfg->ueDlCqiCfg.aprdCqiCfg;
8958 if(ueCfg->ueCatEnum > 0 )
8960 /*KWORK_FIX removed NULL chk for ueSchCmn*/
8961 ueSchCmn->cmn.ueCat = ueCfg->ueCatEnum - 1;
8965 ueSchCmn->cmn.ueCat = 0; /* Assuming enum values correctly set */
8967 cmInitTimers(&ueSchCmn->cmn.tmr, 1);
8969 /*2. Perform UEs downlink configuration */
8970 ueDl = &ueSchCmn->dl;
8971 /* RACHO : store the rapId assigned for HandOver UE.
8972 * Append UE to handover list of cmnCell */
8973 if (ueCfg->dedPreambleId.pres == PRSNT_NODEF)
8975 rgSCHCmnDelDedPreamble(cell, ueCfg->dedPreambleId.val);
8976 ueDl->rachInfo.hoRapId = ueCfg->dedPreambleId.val;
8977 cmLListAdd2Tail(&cellSchd->rachCfg.hoUeLst, &ueDl->rachInfo.hoLnk);
8978 ueDl->rachInfo.hoLnk.node = (PTR)ue;
8981 rgSCHCmnUpdUeMimoInfo(ueCfg, ueDl, cell, cellSchd);
8983 if (ueCfg->txMode.pres == TRUE)
8985 if ((ueCfg->txMode.txModeEnum == RGR_UE_TM_4) ||
8986 (ueCfg->txMode.txModeEnum == RGR_UE_TM_6))
8988 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
8990 if (ueCfg->txMode.txModeEnum == RGR_UE_TM_3)
8992 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
8995 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgUeCatTbl, ueSchCmn->cmn.ueCat);
8996 ueDl->maxTbBits = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlTbBits;
8999 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
9000 if(((CM_LTE_UE_CAT_6 == ueSchCmn->cmn.ueCat )
9001 ||(CM_LTE_UE_CAT_7 == ueSchCmn->cmn.ueCat))
9004 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[1];
9008 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[0];
9011 /* Fix : syed Assign hqEnt to UE only if msg4 is done */
9013 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
9014 rgSchTddDlNumHarqProcTbl[cell->ulDlCfgIdx]);
9016 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
9017 RGSCH_NUM_DL_HQ_PROC);
9020 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, ue->isEmtcUe);
9022 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, FALSE);
9024 /* if none of the DL and UL AMBR are configured then fail the configuration
9026 if((ueCfg->ueQosCfg.dlAmbr == 0) && (ueCfg->ueQosCfg.ueBr == 0))
9028 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"UL Ambr and DL Ambr are"
9029 "configured as 0 for CRNTI:%d",ueCfg->crnti);
9030 err->errCause = RGSCHERR_SCH_CFG;
9034 ue->dl.ambrCfgd = (ueCfg->ueQosCfg.dlAmbr * RG_SCH_CMN_REFRESH_TIME)/100;
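   /* Added note: the configured DL AMBR is converted from a per-second figure to a
    * per-refresh-period budget; the division by 100 is consistent with
    * RG_SCH_CMN_REFRESH_TIME being expressed in 10 ms units
    * (REFRESH_TIME * 10 ms / 1000 ms), though the macro value itself is defined
    * elsewhere. Illustratively, dlAmbr = 1000000 with RG_SCH_CMN_REFRESH_TIME = 10
    * would give ambrCfgd = 100000 per refresh period. */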
9036 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, cell);
9037 allocInfo->rnti = ue->ueId;
9039 /* Initializing the lastCfi value to current cfi value */
9040 ueDl->lastCfi = cellSchd->dl.currCfi;
9042 if(cell->emtcEnable && ue->isEmtcUe)
9044 if ((cellSchd->apisEmtcDl->rgSCHRgrDlUeCfg(cell, ue, ueCfg, err)) != ROK)
9046 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9047 "Spec Sched DL UE CFG FAILED for CRNTI:%d",ueCfg->crnti);
9055 if ((cellSchd->apisDl->rgSCHRgrDlUeCfg(cell, ue, ueCfg, err)) != ROK)
9057 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9058 "Spec Sched DL UE CFG FAILED for CRNTI:%d",ueCfg->crnti);
9065 /* 3. Initialize ul part */
9066 ueUl = &ueSchCmn->ul;
9068 rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd,
9069 cell->isCpUlExtend);
9071 ue->ul.maxBytesPerUePerTti = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxUlBits * \
9072 RG_SCH_CMN_MAX_BITS_RATIO / (RG_SCH_CMN_UL_COM_DENOM*8);
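   /* Added note: maxUlBits is the UE category's maximum UL-SCH bits per TTI; the
    * division by 8 converts it to bytes and the RG_SCH_CMN_MAX_BITS_RATIO /
    * RG_SCH_CMN_UL_COM_DENOM factor scales it to a configured fraction of that
    * limit (macro values are defined elsewhere), giving a per-TTI byte cap for
    * this UE. */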
9074 ue->ul.cfgdAmbr = (ueCfg->ueQosCfg.ueBr * RG_SCH_CMN_REFRESH_TIME)/100;
9075 ue->ul.effAmbr = ue->ul.cfgdAmbr;
9076 RGSCHCPYTIMEINFO(cell->crntTime, ue->ul.ulTransTime);
9078 /* Allocate UL BSR allocation tracking List */
9079 cmLListInit(&ueUl->ulAllocLst);
9081 for (cnt = 0; cnt < RG_SCH_CMN_MAX_ALLOC_TRACK; cnt++)
9083 if((rgSCHUtlAllocSBuf(cell->instIdx,
9084 (Data**)&(allRcd),sizeof(RgSchCmnAllocRecord)) != ROK))
9086 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Memory allocation FAILED"
9087 "for CRNTI:%d",ueCfg->crnti);
9088 err->errCause = RGSCHERR_SCH_CFG;
9091 allRcd->allocTime = cell->crntTime;
9092 cmLListAdd2Tail(&ueUl->ulAllocLst, &allRcd->lnk);
9093 allRcd->lnk.node = (PTR)allRcd;
9095 /* Allocate common sch cntrl blocks for LCGs */
9096 for (cnt=0; cnt<RGSCH_MAX_LCG_PER_UE; cnt++)
9098 ret = rgSCHUtlAllocSBuf(cell->instIdx,
9099 (Data**)&(ue->ul.lcgArr[cnt].sch), (sizeof(RgSchCmnLcg)));
9102 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9103 "SCH struct alloc failed for CRNTI:%d",ueCfg->crnti);
9104 err->errCause = RGSCHERR_SCH_CFG;
9108 /* After initialising UL part, do power related init */
9109 ret = rgSCHPwrUeCfg(cell, ue, ueCfg);
9112 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not do "
9113 "power config for UE CRNTI:%d",ueCfg->crnti);
9117 ret = rgSCHCmnSpsUeCfg(cell, ue, ueCfg, err);
9120 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not do "
9121 "SPS config for CRNTI:%d",ueCfg->crnti);
9124 #endif /* LTEMAC_SPS */
9127 if(TRUE == ue->isEmtcUe)
9129 if ((cellSchd->apisEmtcUl->rgSCHRgrUlUeCfg(cell, ue, ueCfg, err)) != ROK)
9131 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Spec Sched UL UE CFG FAILED"
9132 "for CRNTI:%d",ueCfg->crnti);
9139 if ((cellSchd->apisUl->rgSCHRgrUlUeCfg(cell, ue, ueCfg, err)) != ROK)
9141 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Spec Sched UL UE CFG FAILED"
9142 "for CRNTI:%d",ueCfg->crnti);
9147 /* DLFS UE Config */
9148 if (cellSchd->dl.isDlFreqSel)
9150 if ((cellSchd->apisDlfs->rgSCHDlfsUeCfg(cell, ue, ueCfg, err)) != ROK)
9152 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "DLFS UE config FAILED"
9153 "for CRNTI:%d",ueCfg->crnti);
9158 /* Fix: syed align multiple UEs to refresh at same time */
9159 rgSCHCmnGetRefreshPer(cell, ue, &waitPer);
9160 /* Start UE Qos Refresh Timer */
9161 rgSCHCmnAddUeToRefreshQ(cell, ue, waitPer);
9163 rgSCHCmn5gtfUeCfg(cell, ue, ueCfg);
9167 } /* rgSCHCmnRgrUeCfg */
9170 * @brief UE TX mode reconfiguration handler.
9174 * Function : rgSCHCmnDlHdlTxModeRecfg
9176 * This function updates UE-specific scheduler
9177 * information upon UE reconfiguration.
9179 * @param[in] RgSchUeCb *ue
9180 * @param[in] RgrUeRecfg *ueRecfg
9185 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg
9189 RgrUeRecfg *ueRecfg,
9193 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg, numTxPorts)
9196 RgrUeRecfg *ueRecfg;
9201 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg
9208 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg)
9211 RgrUeRecfg *ueRecfg;
9215 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
9216 TRC2(rgSCHCmnDlHdlTxModeRecfg);
9218 if (ueRecfg->txMode.pres != PRSNT_NODEF)
9222 /* ccpu00140894- Starting Timer for TxMode Transition Completion*/
9223 ue->txModeTransCmplt =FALSE;
9224 rgSCHTmrStartTmr (ue->cell, ue, RG_SCH_TMR_TXMODE_TRNSTN, RG_SCH_TXMODE_TRANS_TIMER);
9225 if (ueRecfg->txMode.tmTrnstnState == RGR_TXMODE_RECFG_CMPLT)
9227 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell,
9228 RG_SCH_CMN_TD_TXMODE_RECFG);
9229 /* MS_WORKAROUND for ccpu00123186 MIMO Fix Start: need to set FORCE TD bitmap based on TX mode */
9230 ueDl->mimoInfo.ri = 1;
9231 if ((ueRecfg->txMode.txModeEnum == RGR_UE_TM_4) ||
9232 (ueRecfg->txMode.txModeEnum == RGR_UE_TM_6))
9234 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
9236 if (ueRecfg->txMode.txModeEnum == RGR_UE_TM_3)
9238 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
9240 /* MIMO Fix End: need to set FORCE TD bitmap based on TX mode */
9243 if (ueRecfg->txMode.tmTrnstnState == RGR_TXMODE_RECFG_START)
9245 /* start afresh forceTD masking */
9246 RG_SCH_CMN_INIT_FORCE_TD(ue, cell, 0);
9247 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_TXMODE_RECFG);
9248 /* Initialize MIMO related parameters of UE */
9251 if(ueRecfg->txMode.pres)
9253 if((ueRecfg->txMode.txModeEnum ==RGR_UE_TM_3) ||
9254 (ueRecfg->txMode.txModeEnum ==RGR_UE_TM_4))
9256 if(ueRecfg->ueCodeBookRstRecfg.pres)
9259 rgSCHCmnComputeRank(ueRecfg->txMode.txModeEnum,
9260 ueRecfg->ueCodeBookRstRecfg.pmiBitMap, numTxPorts);
9264 ueDl->mimoInfo.ri = 1;
9269 ueDl->mimoInfo.ri = 1;
9274 ueDl->mimoInfo.ri = 1;
9277 ueDl->mimoInfo.ri = 1;
9278 #endif /* TFU_UPGRADE */
9279 if ((ueRecfg->txMode.txModeEnum == RGR_UE_TM_4) ||
9280 (ueRecfg->txMode.txModeEnum == RGR_UE_TM_6))
9282 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
9284 if (ueRecfg->txMode.txModeEnum == RGR_UE_TM_3)
9286 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
9291 /***********************************************************
9293 * Func : rgSCHCmnUpdUeMimoInfo
9295 * Desc : Updates the UE's DL MIMO information (rank and per-codeword CQI)
9303 **********************************************************/
9305 PRIVATE Void rgSCHCmnUpdUeMimoInfo
9310 RgSchCmnCell *cellSchd
9313 PRIVATE Void rgSCHCmnUpdUeMimoInfo(ueCfg, ueDl, cell, cellSchd)
9317 RgSchCmnCell *cellSchd;
9320 TRC2(rgSCHCmnUpdUeMimoInfo)
9322 if(ueCfg->txMode.pres)
9324 if((ueCfg->txMode.txModeEnum ==RGR_UE_TM_3) ||
9325 (ueCfg->txMode.txModeEnum ==RGR_UE_TM_4))
9327 if(ueCfg->ueCodeBookRstCfg.pres)
9330 rgSCHCmnComputeRank(ueCfg->txMode.txModeEnum,
9331 ueCfg->ueCodeBookRstCfg.pmiBitMap, cell->numTxAntPorts);
9335 ueDl->mimoInfo.ri = 1;
9340 ueDl->mimoInfo.ri = 1;
9345 ueDl->mimoInfo.ri = 1;
9349 ueDl->mimoInfo.ri = 1;
9350 #endif /*TFU_UPGRADE */
9351 ueDl->mimoInfo.cwInfo[0].cqi = cellSchd->dl.ccchCqi;
9352 ueDl->mimoInfo.cwInfo[1].cqi = cellSchd->dl.ccchCqi;
9356 /***********************************************************
9358 * Func : rgSCHCmnUpdUeUlCqiInfo
9360 * Desc : Updates the UE's UL CQI related information
9368 **********************************************************/
9370 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo
9375 RgSchCmnUe *ueSchCmn,
9376 RgSchCmnCell *cellSchd,
9380 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd, isEcp)
9384 RgSchCmnUe *ueSchCmn;
9385 RgSchCmnCell *cellSchd;
9390 TRC2(rgSCHCmnUpdUeUlCqiInfo)
9393 if(ue->srsCb.srsCfg.type == RGR_SCH_SRS_SETUP)
9395 if(ue->ul.ulTxAntSel.pres)
9397 ueUl->crntUlCqi[ue->srsCb.selectedAnt] = cellSchd->ul.dfltUlCqi;
9398 ueUl->validUlCqi = ueUl->crntUlCqi[ue->srsCb.selectedAnt];
9402 ueUl->crntUlCqi[0] = cellSchd->ul.dfltUlCqi;
9403 ueUl->validUlCqi = ueUl->crntUlCqi[0];
9405 ue->validTxAnt = ue->srsCb.selectedAnt;
9409 ueUl->validUlCqi = cellSchd->ul.dfltUlCqi;
9413 ueUl->ulLaCb.cqiBasediTbs = rgSchCmnUlCqiToTbsTbl[isEcp]
9414 [ueUl->validUlCqi] * 100;
9415 ueUl->ulLaCb.deltaiTbs = 0;
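   /* Added note: the CQI-derived iTBS is kept scaled by 100, presumably so that UL
    * link adaptation can apply fine-grained corrections via deltaiTbs (in the same
    * hundredths scale) without floating point; deltaiTbs starts at 0 for a new UE. */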
9419 ueUl->crntUlCqi[0] = cellSchd->ul.dfltUlCqi;
9420 #endif /*TFU_UPGRADE */
9421 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgUeCatTbl, ueSchCmn->cmn.ueCat);
9422 if (rgUeCatTbl[ueSchCmn->cmn.ueCat].ul64qamSup == FALSE)
9424 ueUl->maxUlCqi = cellSchd->ul.max16qamCqi;
9428 ueUl->maxUlCqi = RG_SCH_CMN_UL_NUM_CQI - 1;
9433 /***********************************************************
9435 * Func : rgSCHCmnUpdUeCatCfg
9437 * Desc : Updates UE-category dependent UL and DL information
9445 **********************************************************/
9447 PRIVATE Void rgSCHCmnUpdUeCatCfg
9453 PRIVATE Void rgSCHCmnUpdUeCatCfg(ue, cell)
9458 RgSchDlHqEnt *hqE = NULLP;
9459 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
9460 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
9461 RgSchCmnUe *ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
9462 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
9464 TRC2(rgSCHCmnUpdUeCatCfg)
9466 ueDl->maxTbBits = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlTbBits;
9468 hqE = RG_SCH_CMN_GET_UE_HQE(ue, cell);
9471 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
9472 if(((CM_LTE_UE_CAT_6 == ueSchCmn->cmn.ueCat )
9473 ||(CM_LTE_UE_CAT_7 == ueSchCmn->cmn.ueCat))
9474 && (RG_SCH_MAX_TX_LYRS_4 == ri))
9476 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[1];
9480 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[0];
9483 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
9485 if (rgUeCatTbl[ueSchCmn->cmn.ueCat].ul64qamSup == FALSE)
9487 ueUl->maxUlCqi = cellSchd->ul.max16qamCqi;
9491 ueUl->maxUlCqi = RG_SCH_CMN_UL_NUM_CQI - 1;
9493 ue->ul.maxBytesPerUePerTti = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxUlBits * \
9494 RG_SCH_CMN_MAX_BITS_RATIO / (RG_SCH_CMN_UL_COM_DENOM*8);
9499 * @brief UE reconfiguration for scheduler.
9503 * Function : rgSCHCmnRgrUeRecfg
9505 * This function updates UE-specific scheduler
9506 * information upon UE reconfiguration.
9508 * @param[in] RgSchCellCb *cell
9509 * @param[in] RgSchUeCb *ue
9510 * @param[int] RgrUeRecfg *ueRecfg
9511 * @param[out] RgSchErrInfo *err
9517 PUBLIC S16 rgSCHCmnRgrUeRecfg
9521 RgrUeRecfg *ueRecfg,
9525 PUBLIC S16 rgSCHCmnRgrUeRecfg(cell, ue, ueRecfg, err)
9528 RgrUeRecfg *ueRecfg;
9532 RgSchCmnCell *cellSchCmn = RG_SCH_CMN_GET_CELL(cell);
9535 TRC2(rgSCHCmnRgrUeRecfg);
9536 /* Basic validations */
9537 if (ueRecfg->ueRecfgTypes & RGR_UE_TXMODE_RECFG)
9540 rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg, cell->numTxAntPorts);
9542 rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg);
9543 #endif /* TFU_UPGRADE */
9545 if(ueRecfg->ueRecfgTypes & RGR_UE_CSG_PARAM_RECFG)
9547 ue->csgMmbrSta = ueRecfg->csgMmbrSta;
9549 /* Changes for UE Category reconfiguration feature */
9550 if(ueRecfg->ueRecfgTypes & RGR_UE_UECAT_RECFG)
9552 rgSCHCmnUpdUeCatCfg(ue, cell);
9554 if (ueRecfg->ueRecfgTypes & RGR_UE_APRD_DLCQI_RECFG)
9556 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
9557 pCellInfo->acqiCb.aCqiCfg = ueRecfg->aprdDlCqiRecfg;
9560 if (ueRecfg->ueRecfgTypes & RGR_UE_PRD_DLCQI_RECFG)
9562 if ((ueRecfg->prdDlCqiRecfg.pres == TRUE)
9563 && (ueRecfg->prdDlCqiRecfg.prdModeEnum != RGR_PRD_CQI_MOD10)
9564 && (ueRecfg->prdDlCqiRecfg.prdModeEnum != RGR_PRD_CQI_MOD20))
9566 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Unsupported periodic CQI "
9567 "reporting mode %d for old CRNIT:%d",
9568 (int)ueRecfg->prdDlCqiRecfg.prdModeEnum,ueRecfg->oldCrnti);
9569 err->errCause = RGSCHERR_SCH_CFG;
9572 ue->dl.ueDlCqiCfg.prdCqiCfg = ueRecfg->prdDlCqiRecfg;
9576 if (ueRecfg->ueRecfgTypes & RGR_UE_ULPWR_RECFG)
9578 if (rgSCHPwrUeRecfg(cell, ue, ueRecfg) != ROK)
9580 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9581 "Power Reconfiguration Failed for OLD CRNTI:%d",ueRecfg->oldCrnti);
9586 if (ueRecfg->ueRecfgTypes & RGR_UE_QOS_RECFG)
9588 /* Uplink Sched related Initialization */
9589 if ((ueRecfg->ueQosRecfg.dlAmbr == 0) && (ueRecfg->ueQosRecfg.ueBr == 0))
9591 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Ul Ambr and DL Ambr "
9592 "configured as 0 for OLD CRNTI:%d",ueRecfg->oldCrnti);
9593 err->errCause = RGSCHERR_SCH_CFG;
9596 ue->ul.cfgdAmbr = (ueRecfg->ueQosRecfg.ueBr * \
9597 RG_SCH_CMN_REFRESH_TIME)/100;
9598 /* Downlink Sched related Initialization */
9599 ue->dl.ambrCfgd = (ueRecfg->ueQosRecfg.dlAmbr * \
9600 RG_SCH_CMN_REFRESH_TIME)/100;
9601 /* Fix: syed Update the effAmbr and effUeBR fields w.r.t the
9602 * new QOS configuration */
9603 rgSCHCmnDelUeFrmRefreshQ(cell, ue);
9604 /* Fix: syed align multiple UEs to refresh at same time */
9605 rgSCHCmnGetRefreshPer(cell, ue, &waitPer);
9606 rgSCHCmnApplyUeRefresh(cell, ue);
9607 rgSCHCmnAddUeToRefreshQ(cell, ue, waitPer);
9610 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
9612 if ((cellSchCmn->apisEmtcUl->rgSCHRgrUlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9614 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9615 "Spec Sched UL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9618 if ((cellSchCmn->apisEmtcDl->rgSCHRgrDlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9620 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9621 "Spec Sched DL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9628 if ((cellSchCmn->apisUl->rgSCHRgrUlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9630 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9631 "Spec Sched UL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9634 if ((cellSchCmn->apisDl->rgSCHRgrDlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9636 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9637 "Spec Sched DL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9641 /* DLFS UE Config */
9642 if (cellSchCmn->dl.isDlFreqSel)
9644 if ((cellSchCmn->apisDlfs->rgSCHDlfsUeRecfg(cell, ue, \
9645 ueRecfg, err)) != ROK)
9647 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9648 "DLFS UE re-config FAILED for CRNTI:%d",ue->ueId);
9654 /* Invoke re-configuration on SPS module */
9655 if (rgSCHCmnSpsUeRecfg(cell, ue, ueRecfg, err) != ROK)
9657 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9658 "DL SPS ReCFG FAILED for UE CRNTI:%d", ue->ueId);
9664 } /* rgSCHCmnRgrUeRecfg*/
9666 /***********************************************************
9668 * Func : rgSCHCmnUlUeDelAllocs
9670 * Desc : Deletion of all UE allocations.
9678 **********************************************************/
9680 PRIVATE Void rgSCHCmnUlUeDelAllocs
9686 PRIVATE Void rgSCHCmnUlUeDelAllocs(cell, ue)
9691 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
9692 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
9695 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
9697 TRC2(rgSCHCmnUlUeDelAllocs);
9699 for (i = 0; i < ueUl->hqEnt.numHqPrcs; ++i)
9701 RgSchUlHqProcCb *proc = rgSCHUhmGetUlHqProc(cell, ue, i);
9704 /* proc can't be NULL here */
9712 /* Insure fix for reading dangling memory: crntAlloc is NULLed */
9714 if(proc->alloc == ulSpsUe->ulSpsSchdInfo.crntAlloc)
9716 ulSpsUe->ulSpsSchdInfo.crntAlloc = NULLP;
9717 ulSpsUe->ulSpsSchdInfo.crntAllocSf = NULLP;
9721 rgSCHCmnUlFreeAllocation(cell, &cellUl->ulSfArr[proc->ulSfIdx],
9722 proc->alloc,ue->isEmtcUe);
9724 rgSCHCmnUlFreeAllocation(cell, &cellUl->ulSfArr[proc->ulSfIdx],
9727 /* PHY probably needn't be intimated since
9728 * whatever intimation it needs happens at the last minute
9731 /* Fix: syed Adaptive Msg3 Retx crash. Remove the harqProc
9732 * from adaptive retx List. */
9733 if (proc->reTxLnk.node)
9736 //TODO_SID: Need to take care
9737 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
9738 proc->reTxLnk.node = (PTR)NULLP;
9746 /***********************************************************
9748 * Func : rgSCHCmnDelUeFrmRefreshQ
9750 * Desc : Removes a UE from the refresh queue, so that the UE is
9751 * no longer periodically triggered to refresh its GBR and AMBR.
9760 **********************************************************/
9762 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ
9768 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ(cell, ue)
9773 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
9775 RgSchCmnUeInfo *ueSchd = RG_SCH_CMN_GET_CMN_UE(ue);
9777 TRC2(rgSCHCmnDelUeFrmRefreshQ);
9779 #ifdef RGL_SPECIFIC_CHANGES
9780 if(ue->refreshOffset < RGSCH_MAX_REFRESH_GRPSZ)
9782 if(cell->refreshUeCnt[ue->refreshOffset])
9784 cell->refreshUeCnt[ue->refreshOffset]--;
9790 cmMemset((U8 *)&arg, 0, sizeof(arg));
9791 arg.tqCp = &sched->tmrTqCp;
9792 arg.tq = sched->tmrTq;
9793 arg.timers = &ueSchd->tmr;
9797 arg.evnt = RG_SCH_CMN_EVNT_UE_REFRESH;
9803 /***********************************************************
9805 * Func : rgSCHCmnUeCcchSduDel
9807 * Desc : Clear CCCH SDU scheduling context.
9815 **********************************************************/
9817 PRIVATE Void rgSCHCmnUeCcchSduDel
9823 PRIVATE Void rgSCHCmnUeCcchSduDel(cell, ueCb)
9828 RgSchDlHqEnt *hqE = NULLP;
9829 RgSchDlHqProcCb *ccchSduHqP = NULLP;
9830 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
9832 TRC2(rgSCHCmnUeCcchSduDel);
9834 hqE = RG_SCH_CMN_GET_UE_HQE(ueCb, cell);
9839 ccchSduHqP = hqE->ccchSduProc;
9840 if(ueCb->ccchSduLnk.node != NULLP)
9842 /* Remove the ccchSduProc if it is in the Tx list */
9843 cmLListDelFrm(&(cell->ccchSduUeLst), &(ueCb->ccchSduLnk));
9844 ueCb->ccchSduLnk.node = NULLP;
9846 else if(ccchSduHqP != NULLP)
9848 /* Fix for crash due to stale pdcch. Release ccch pdcch*/
9849 if(ccchSduHqP->pdcch)
9851 cmLListDelFrm(&ccchSduHqP->subFrm->pdcchInfo.pdcchs,
9852 &ccchSduHqP->pdcch->lnk);
9853 cmLListAdd2Tail(&cell->pdcchLst, &ccchSduHqP->pdcch->lnk);
9854 ccchSduHqP->pdcch = NULLP;
9856 if(ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk.node != NULLP)
9858 /* Remove the ccchSduProc if it is in the retx list */
9859 cmLListDelFrm(&cellSch->dl.ccchSduRetxLst,
9860 &ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk);
9861 /* ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk.node = NULLP; */
9862 rgSCHDhmRlsHqpTb(ccchSduHqP, 0, TRUE);
9864 else if ((ccchSduHqP->subFrm != NULLP) &&
9865 (ccchSduHqP->hqPSfLnk.node != NULLP))
9867 rgSCHUtlDlHqPTbRmvFrmTx(ccchSduHqP->subFrm,
9868 ccchSduHqP, 0, FALSE);
9869 rgSCHDhmRlsHqpTb(ccchSduHqP, 0, TRUE);
9879 * @brief UE deletion for scheduler.
9883 * Function : rgSCHCmnUeDel
9885 * This function deletes all scheduler information
9886 * pertaining to a UE.
9888 * @param[in] RgSchCellCb *cell
9889 * @param[in] RgSchUeCb *ue
9893 PUBLIC Void rgSCHCmnUeDel
9899 PUBLIC Void rgSCHCmnUeDel(cell, ue)
9904 RgSchDlHqEnt *hqE = NULLP;
9905 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
9907 RgSchCmnAllocRecord *allRcd;
9909 RgSchCmnCell *cellSchCmn = RG_SCH_CMN_GET_CELL(cell);
9911 TRC2(rgSCHCmnUeDel);
9913 if (RG_SCH_CMN_GET_UE(ue,cell) == NULLP)
9915 /* Common scheduler config has not happened yet */
9918 hqE = RG_SCH_CMN_GET_UE_HQE(ue, cell);
9921 /* UE Free can be triggered before MSG4 done when dlHqE is not updated */
9925 rgSCHEmtcCmnUeCcchSduDel(cell, ue);
9930 rgSCHCmnUeCcchSduDel(cell, ue);
9933 rgSCHCmnDelUeFrmRefreshQ(cell, ue);
9935 rgSCHCmnUlUeDelAllocs(cell, ue);
9937 rgSCHCmnDelRachInfo(cell, ue);
9940 if(TRUE == ue->isEmtcUe)
9942 cellSchCmn->apisEmtcUl->rgSCHFreeUlUe(cell, ue);
9947 cellSchCmn->apisUl->rgSCHFreeUlUe(cell, ue);
9952 for(idx = 1; idx <= RG_SCH_MAX_SCELL ; idx++)
9954 if(ue->cellInfo[idx] != NULLP)
9956 rgSCHSCellDelUeSCell(cell,ue,idx);
9963 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
9965 cellSchCmn->apisEmtcDl->rgSCHFreeDlUe(cell, ue);
9970 cellSchCmn->apisDl->rgSCHFreeDlUe(cell, ue);
9972 rgSCHPwrUeDel(cell, ue);
9975 rgSCHCmnSpsUeDel(cell, ue);
9976 #endif /* LTEMAC_SPS*/
9979 rgSchCmnDlSfHqDel(ue, cell);
9981 /* DLFS UE delete */
9982 if (cellSchCmn->dl.isDlFreqSel)
9984 cellSchCmn->apisDlfs->rgSCHDlfsUeDel(cell, ue);
9986 node = ueUl->ulAllocLst.first;
9988 /* ccpu00117052 - MOD - Passing double pointer in all the places of
9989 rgSCHUtlFreeSBuf function call for proper NULLP assignment*/
9992 allRcd = (RgSchCmnAllocRecord *)node->node;
9994 cmLListDelFrm(&ueUl->ulAllocLst, &allRcd->lnk);
9995 rgSCHUtlFreeSBuf(cell->instIdx,
9996 (Data**)(&allRcd), (sizeof(RgSchCmnAllocRecord)));
9999 for(cnt = 0; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
10001 if (ue->ul.lcgArr[cnt].sch != NULLP)
10003 rgSCHUtlFreeSBuf(cell->instIdx,
10004 (Data**)(&(ue->ul.lcgArr[cnt].sch)), (sizeof(RgSchCmnLcg)));
10008 /* Fix : syed Moved hqEnt deinit to rgSCHCmnDlDeInitHqEnt */
10009 idx = (U8)((cell->cellId - rgSchCb[cell->instIdx].genCfg.startCellId) & (CM_LTE_MAX_CELLS - 1));
10010 rgSCHUtlFreeSBuf(cell->instIdx,
10011 (Data**)(&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch))), (sizeof(RgSchCmnUe)));
10013 } /* rgSCHCmnUeDel */
10017 * @brief This function handles the common code rate configurations
10018 * done as part of RgrCellCfg/RgrCellRecfg.
10022 * Function: rgSCHCmnDlCnsdrCmnRt
10023 * Purpose: This function handles the common code rate configurations
10024 * done as part of RgrCellCfg/RgrCellRecfg.
10026 * Invoked by: Scheduler
10028 * @param[in] RgSchCellCb *cell
10029 * @param[in] RgrDlCmnCodeRateCfg *dlCmnCodeRate
10034 PRIVATE S16 rgSCHCmnDlCnsdrCmnRt
10037 RgrDlCmnCodeRateCfg *dlCmnCodeRate
10040 PRIVATE S16 rgSCHCmnDlCnsdrCmnRt(cell, dlCmnCodeRate)
10042 RgrDlCmnCodeRateCfg *dlCmnCodeRate;
10045 RgSchCmnCell *cellDl = RG_SCH_CMN_GET_CELL(cell);
10052 TRC2(rgSCHCmnDlCnsdrCmnRt);
10054 /* Code rate is information bits per 1024 physical-channel bits; since the
10055  * modulation order is 2 (QPSK), this equals bits per 1024/2 REs */
10056 if (dlCmnCodeRate->bcchPchRaCodeRate != 0)
10058 bitsPerRb = ((dlCmnCodeRate->bcchPchRaCodeRate * 2) *
10059 cellDl->dl.noResPerRb[3])/1024;
10063 bitsPerRb = ((RG_SCH_CMN_DEF_BCCHPCCH_CODERATE * 2) *
10064 cellDl->dl.noResPerRb[3])/1024;
10066 /* Store bitsPerRb in cellDl->dl to use later to determine
10067 * Number of RBs for UEs with SI-RNTI, P-RNTI and RA-RNTI */
10068 cellDl->dl.bitsPerRb = bitsPerRb;
10069 /* ccpu00115595 end*/
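   /* Worked example (added, RE count is illustrative): with a bcchPchRaCodeRate of
    * 288 bits per 1024 physical bits, QPSK (2 bits per RE) and 120 usable REs per RB
    * at CFI 3, bitsPerRb = (288 * 2 * 120) / 1024 = 67 information bits per RB for
    * the common channels. */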
10070 /* calculate the ITbs for 2 RBs. Initialize ITbs to MAX value */
10073 bitsPer2Rb = bitsPerRb * rbNum;
10074 while ((i < 9) && (rgTbSzTbl[0][i][rbNum - 1] <= bitsPer2Rb))
10077 (i <= 1)? (cellDl->dl.cmnChITbs.iTbs2Rbs = 0) :
10078 (cellDl->dl.cmnChITbs.iTbs2Rbs = i-1);
10080 /* calculate the ITbs for 3 RBs. Initialize ITbs to MAX value */
10083 bitsPer3Rb = bitsPerRb * rbNum;
10084 while ((i < 9) && (rgTbSzTbl[0][i][rbNum - 1] <= bitsPer3Rb))
10087 (i <= 1)? (cellDl->dl.cmnChITbs.iTbs3Rbs = 0) :
10088 (cellDl->dl.cmnChITbs.iTbs3Rbs = i-1);
10091 pdcchBits = 1 + /* Flag for format0/format1a differentiation */
10092 1 + /* Localized/distributed VRB assignment flag */
10095 3 + /* Harq process Id */
10097 4 + /* Harq process Id */
10098 2 + /* UL Index or DAI */
10100 1 + /* New Data Indicator */
10103 1 + rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
10104 (cell->bwCfg.dlTotalBw + 1))/2);
10105 /* Resource block assignment ceil[log2(bw(bw+1)/2)] : \
10106 Since VRB is local */
10107 /* For TDD consider DAI */
10109 /* Convert the pdcchBits to actual pdcchBits required for transmission */
10110 if (dlCmnCodeRate->pdcchCodeRate != 0)
10112 pdcchBits = (pdcchBits * 1024)/dlCmnCodeRate->pdcchCodeRate;
10113 if (pdcchBits <= 288) /* 288 : Num of pdcch bits for aggrLvl=4 */
10115 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL4;
10117 else /* 576 : Num of pdcch bits for aggrLvl=8 */
10119 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL8;
10124 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL4;
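   /* Added note (payload size is illustrative): the raw DCI payload computed above
    * is expanded by the configured PDCCH code rate to get coded bits; e.g. a 44-bit
    * payload at a code rate of 256/1024 needs 44 * 1024 / 256 = 176 coded bits,
    * which fits within the 288 bits of aggregation level 4, while anything above
    * 288 bits selects aggregation level 8. */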
10126 if (dlCmnCodeRate->ccchCqi == 0)
10132 cellDl->dl.ccchCqi = dlCmnCodeRate->ccchCqi;
10139 * @brief This function handles the configuration of cell for the first
10140 * time by the scheduler.
10144 * Function: rgSCHCmnDlRgrCellCfg
10145 * Purpose: Configuration received is stored into the data structures
10146 * Also, update the scheduler with the number of frames of
10147 * RACH preamble transmission.
10149 * Invoked by: BO and Scheduler
10151 * @param[in] RgSchCellCb* cell
10152 * @param[in] RgrCellCfg* cfg
10157 PRIVATE S16 rgSCHCmnDlRgrCellCfg
10164 PRIVATE S16 rgSCHCmnDlRgrCellCfg(cell, cfg, err)
10170 RgSchCmnCell *cellSch;
10175 U8 maxDlSubfrms = cell->numDlSubfrms;
10176 U8 splSubfrmIdx = cfg->spclSfCfgIdx;
10179 RgSchTddSubfrmInfo subfrmInfo = rgSchTddMaxUlSubfrmTbl[cell->ulDlCfgIdx];
10190 TRC2(rgSCHCmnDlRgrCellCfg);
10193 cellSch = RG_SCH_CMN_GET_CELL(cell);
10194 cellSch->dl.numRaSubFrms = rgRaPrmblToRaFrmTbl[cell->\
10195 rachCfg.preambleFormat];
10196 /*[ccpu00138532]-ADD-fill the Msg4 Harq data */
10197 cell->dlHqCfg.maxMsg4HqTx = cfg->dlHqCfg.maxMsg4HqTx;
10199 /* Msg4 Tx Delay = (HARQ_RTT * MAX_MSG4_HARQ_RETX) +
10200 3 TTI (MAX L1+L2 processing delay at the UE) */
10201 cellSch->dl.msg4TxDelay = (cfg->dlHqCfg.maxMsg4HqTx-1) *
10202 rgSchCmnHarqRtt[cell->ulDlCfgIdx] + 3;
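   /* Added note (HARQ RTT value is illustrative): with maxMsg4HqTx = 4 and a HARQ
    * RTT of 8 subframes for the configured UL/DL pattern, the budgeted worst-case
    * Msg4 delay is (4 - 1) * 8 + 3 = 27 subframes, the final 3 TTIs covering the
    * UE's L1+L2 processing delay mentioned above. */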
10203 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10204 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10205 if (cfg->maxUePerDlSf == 0)
10207 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10209 if (cellSch->dl.maxUePerDlSf < cellSch->dl.maxUeNewTxPerTti)
10215 if (cell->bwCfg.dlTotalBw <= 10)
10225 /* DwPTS Scheduling Changes Start */
10226 cellSch->dl.splSfCfg = splSubfrmIdx;
10228 if (cfg->isCpDlExtend == TRUE)
10230 if((0 == splSubfrmIdx) || (4 == splSubfrmIdx) ||
10231 (7 == splSubfrmIdx) || (8 == splSubfrmIdx)
10234 cell->splSubfrmCfg.isDlDataAllowed = FALSE;
10238 cell->splSubfrmCfg.isDlDataAllowed = TRUE;
10243 /* Refer to 36.213 Section 7.1.7 */
10244 if((0 == splSubfrmIdx) || (5 == splSubfrmIdx))
10246 cell->splSubfrmCfg.isDlDataAllowed = FALSE;
10250 cell->splSubfrmCfg.isDlDataAllowed = TRUE;
10253 /* DwPTS Scheduling Changes End */
10255 splSfCfi = RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi);
10256 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, splSfCfi);
10258 for (sfCount = 0; sfCount < maxDlSubfrms; sfCount++)
10260 sf = cell->subFrms[sfCount];
10261 /* Check whether sfCount corresponds to the first special subframe (at index 0)
10262  * or to a subsequent special subframe */
10263 if(subfrmInfo.switchPoints == 1)
10265 isSplfrm = rgSCHCmnIsSplSubfrm(swPtCnt, sfCount,
10266 RG_SCH_CMN_10_MS_PRD, &subfrmInfo);
10270 isSplfrm = rgSCHCmnIsSplSubfrm(swPtCnt, sfCount,
10271 RG_SCH_CMN_5_MS_PRD, &subfrmInfo);
10273 if(isSplfrm == TRUE)
10276 /* DwPTS Scheduling Changes Start */
10277 if (cell->splSubfrmCfg.isDlDataAllowed == TRUE)
10279 sf->sfType = RG_SCH_SPL_SF_DATA;
10283 sf->sfType = RG_SCH_SPL_SF_NO_DATA;
10285 /* DwPTS Scheduling Changes End */
10289 /* DwPTS Scheduling Changes Start */
10290 if (sf->sfNum != 0)
10292 sf->sfType = RG_SCH_DL_SF;
10296 sf->sfType = RG_SCH_DL_SF_0;
10298 /* DwPTS Scheduling Changes End */
10301 /* Calculate the number of CCEs per subframe in the cell */
10302 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][sf->sfNum];
10303 if(cell->dynCfiCb.isDynCfiEnb == TRUE)
10305 /* If the dynamic CFI feature is enabled, the default CFI
10306  * value 1 is used */
10307 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][1];
10311 if (sf->sfType == RG_SCH_SPL_SF_DATA)
10313 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][splSfCfi];
10317 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi)];
10322 /* Initialize the RACH response scheduling related information */
10323 if(rgSCHCmnDlRachInfoInit(cell) != ROK)
10328 /* Allocate PRACH preamble list */
10329 rgSCHCmnDlCreateRachPrmLst(cell);
10331 /* Initialize PHICH offset information */
10332 rgSCHCmnDlPhichOffsetInit(cell);
10334 /* Update the size of HARQ ACK/NACK feedback table */
10335 /* The array size is increased by 2 to have enough free indices, where other
10336 * indices are busy waiting for HARQ feedback */
10337 cell->ackNackFdbkArrSize = rgSchTddANFdbkMapTbl[cell->ulDlCfgIdx] + 2;
10339 /* Initialize expected HARQ ACK/NACK feedback time */
10340 rgSCHCmnDlANFdbkInit(cell);
10342 /* Initialize UL association set index */
10343 if(cell->ulDlCfgIdx != 0)
10345 rgSCHCmnDlKdashUlAscInit(cell);
10348 if (cfg->isCpDlExtend == TRUE)
10350 cp = RG_SCH_CMN_EXT_CP;
10352 cell->splSubfrmCfg.dwPts =
10353 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlDwPts;
10355 if ( cell->splSubfrmCfg.dwPts == 0 )
10357 cell->isDwPtsCnted = FALSE;
10361 cell->isDwPtsCnted = TRUE;
10364 if(cfg->isCpUlExtend == TRUE)
10366 cell->splSubfrmCfg.upPts =
10367 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlExtUpPts;
10371 cell->splSubfrmCfg.upPts =
10372 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlNorUpPts;
10377 cp = RG_SCH_CMN_NOR_CP;
10379 cell->splSubfrmCfg.dwPts =
10380 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlDwPts;
10381 cell->isDwPtsCnted = TRUE;
10383 if(cfg->isCpUlExtend == TRUE)
10385 cell->splSubfrmCfg.upPts =
10386 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlExtUpPts;
10390 cell->splSubfrmCfg.upPts =
10391 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlNorUpPts;
10395 /* Initializing the cqiToEffTbl and cqiToTbsTbl for every CFI value */
10396 for(cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++,cfiIdx++)
10398 cellSch->dl.cqiToTbsTbl[0][cfi] = rgSchCmnCqiToTbs[0][cp][cfiIdx];
10399 cellSch->dl.cqiToEffTbl[0][cfi] = rgSchCmnEffTbl[0][cp][rgSchCmnAntIdx\
10400 [cell->numTxAntPorts]][cfiIdx];
10401 cellSch->dl.cqiToTbsTbl[1][cfi] = rgSchCmnCqiToTbs[1][cp][cfiIdx];
10402 cellSch->dl.cqiToEffTbl[1][cfi] = rgSchCmnEffTbl[1][cp][rgSchCmnAntIdx\
10403 [cell->numTxAntPorts]][cfiIdx];
10406 /* Initializing the values of CFI parameters */
10407 if(cell->dynCfiCb.isDynCfiEnb)
10409 /* If DCFI is enabled, current CFI value will start from 1 */
10410 cellSch->dl.currCfi = cellSch->dl.newCfi = 1;
10414 /* If DCFI is disabled, current CFI value is set as default max allowed CFI value */
10415 cellSch->dl.currCfi = RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi);
10416 cellSch->dl.newCfi = cellSch->dl.currCfi;
10419 /* Include CRS REs while calculating Efficiency
10420 * The number of Resource Elements occupied by CRS depends on Number of
10421 * Antenna Ports. Please refer to Section 6.10.1 of 3GPP TS 36.211 V8.8.0.
10422 * Also, please refer to Figures 6.10.1.2-1 and 6.10.1.2-2 for diagrammatic
10423 * details of the same. Please note that PDCCH overlap symbols would not be
10424 * considered in CRS REs deduction */
10425 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, numPdcchSym++)
10427 cellSch->dl.noResPerRb[cfi] = (((noSymPerSlot * RG_SCH_CMN_NUM_SLOTS_PER_SF)
10428 - numPdcchSym) *RB_SCH_CMN_NUM_SCS_PER_RB) - rgSchCmnNumResForCrs[cell->numTxAntPorts];
10431 /* DwPTS Scheduling Changes Start */
10432 antPortIdx = (cell->numTxAntPorts == 1)? 0:
10433 ((cell->numTxAntPorts == 2)? 1: 2);
10435 if (cp == RG_SCH_CMN_NOR_CP)
10437 splSfIdx = (splSubfrmIdx == 4)? 1: 0;
10441 splSfIdx = (splSubfrmIdx == 3)? 1: 0;
10444 numCrs = rgSchCmnDwptsCrs[splSfIdx][antPortIdx];
10446 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI-1; cfi++)
10448 /* If CFI is 2 and Ant Port is 4, don't consider the sym 1 CRS REs */
10449 if (antPortIdx == 2 && cfi == 2)
10453 cellSch->dl.numReDwPts[cfi] = ((cell->splSubfrmCfg.dwPts - cfi)*
10454 RB_SCH_CMN_NUM_SCS_PER_RB) - numCrs;
10456 /* DwPTS Scheduling Changes End */
10458 if (cfg->maxDlBwPerUe == 0)
10460 cellSch->dl.maxDlBwPerUe = RG_SCH_CMN_MAX_DL_BW_PERUE;
10464 cellSch->dl.maxDlBwPerUe = cfg->maxDlBwPerUe;
10466 if (cfg->maxDlRetxBw == 0)
10468 cellSch->dl.maxDlRetxBw = RG_SCH_CMN_MAX_DL_RETX_BW;
10472 cellSch->dl.maxDlRetxBw = cfg->maxDlRetxBw;
10474 /* Fix: MUE_PERTTI_DL*/
10475 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10476 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10477 if (cfg->maxUePerDlSf == 0)
10479 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10481 RG_SCH_RESET_HCSG_DL_PRB_CNTR(&cellSch->dl);
10482 /*[ccpu00138609]-ADD- Configure the Max CCCH Counter */
10483 if (cfg->maxCcchPerDlSf > cfg->maxUePerDlSf)
10485 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
10486 "Invalid configuration !: "
10487 "maxCcchPerDlSf %u > maxUePerDlSf %u",
10488 cfg->maxCcchPerDlSf, cfg->maxUePerDlSf );
10492 else if (!cfg->maxCcchPerDlSf)
10494 /* ccpu00143032: maxCcchPerDlSf 0 means it was not configured by the application,
10495  * hence it is set to maxUePerDlSf. If maxCcchPerDlSf is 0 the scheduler
10496  * doesn't count CCCH allocations in the MaxUePerTti cap. Hence more than
10497  * 4 UEs get scheduled & SCH expects >16 HARQ PDUs in a TTI, which causes
10498  * an FLE crash in PHY as PHY has a limit of 16 max */
10499 cellSch->dl.maxCcchPerDlSf = cfg->maxUePerDlSf;
10503 cellSch->dl.maxCcchPerDlSf = cfg->maxCcchPerDlSf;
10505 if (rgSCHCmnDlCnsdrCmnRt(cell, &cfg->dlCmnCodeRate) != ROK)
10510 /*ccpu00118273 - ADD - start */
10511 cmLListInit(&cellSch->dl.msg4RetxLst);
10513 cmLListInit(&cellSch->dl.ccchSduRetxLst);
10516 #ifdef RG_PHASE2_SCHED
10517 if (cellSch->apisDlfs == NULLP) /* DLFS specific initialization */
10519 cellSch->apisDlfs = &rgSchDlfsSchdTbl[cfg->dlfsSchdType];
10521 if (cfg->dlfsCfg.isDlFreqSel)
10523 ret = cellSch->apisDlfs->rgSCHDlfsCellCfg(cell, cfg, err);
10529 cellSch->dl.isDlFreqSel = cfg->dlfsCfg.isDlFreqSel;
10532 /* Power related configuration */
10533 ret = rgSCHPwrCellCfg(cell, cfg);
10539 cellSch->dl.bcchTxPwrOffset = cfg->bcchTxPwrOffset;
10540 cellSch->dl.pcchTxPwrOffset = cfg->pcchTxPwrOffset;
10541 cellSch->dl.rarTxPwrOffset = cfg->rarTxPwrOffset;
10542 cellSch->dl.phichTxPwrOffset = cfg->phichTxPwrOffset;
10543 cellSch->dl.msg4pAVal = cfg->msg4pAVal;
10546 #else /* LTE_TDD */
10548 * @brief This function handles the configuration of cell for the first
10549 * time by the scheduler.
10553 * Function: rgSCHCmnDlRgrCellCfg
10554 * Purpose: Configuration received is stored into the data structures
10555 * Also, update the scheduler with the number of frames of
10556 * RACH preamble transmission.
10558 * Invoked by: BO and Scheduler
10560 * @param[in] RgSchCellCb* cell
10561 * @param[in] RgrCellCfg* cfg
10562 * @param[in] RgSchErrInfo* err
10567 PRIVATE S16 rgSCHCmnDlRgrCellCfg
10574 PRIVATE S16 rgSCHCmnDlRgrCellCfg(cell, cfg, err)
10581 RgSchCmnCell *cellSch;
10588 TRC2(rgSCHCmnDlRgrCellCfg);
10590 cellSch = RG_SCH_CMN_GET_CELL(cell);
10592 /* Initialize the parameters with the ones received in the */
10593 /* configuration. */
10595 /* Added matrix 'rgRaPrmblToRaFrmTbl' for computation of RA
10596 * sub-frames from preamble format */
10597 cellSch->dl.numRaSubFrms = rgRaPrmblToRaFrmTbl[cell->rachCfg.preambleFormat];
10599 /*[ccpu00138532]-ADD-fill the Msg4 Harq data */
10600 cell->dlHqCfg.maxMsg4HqTx = cfg->dlHqCfg.maxMsg4HqTx;
10602 /* Msg4 Tx Delay = (HARQ_RTT * MAX_MSG4_HARQ_RETX) +
10603 3 TTI (MAX L1+L2 processing delay at the UE) */
10604 cellSch->dl.msg4TxDelay = (cfg->dlHqCfg.maxMsg4HqTx-1) *
10605 rgSchCmnHarqRtt[7] + 3;
10607 if (cell->bwCfg.dlTotalBw <= 10)
10618 if (cell->isCpDlExtend == TRUE)
10620 cp = RG_SCH_CMN_EXT_CP;
10625 cp = RG_SCH_CMN_NOR_CP;
10629 /* Initializing the cqiToEffTbl and cqiToTbsTbl for every CFI value */
10630 for(cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, cfiIdx++)
10632 cellSch->dl.cqiToTbsTbl[0][cfi] = rgSchCmnCqiToTbs[0][cp][cfiIdx];
10634 cellSch->dl.emtcCqiToTbsTbl[0][cfi] = rgSchEmtcCmnCqiToTbs[0][cp][cfiIdx];
10636 cellSch->dl.cqiToEffTbl[0][cfi] = rgSchCmnEffTbl[0][cp][rgSchCmnAntIdx\
10637 [cell->numTxAntPorts]][cfiIdx];
10638 cellSch->dl.cqiToTbsTbl[1][cfi] = rgSchCmnCqiToTbs[1][cp][cfiIdx];
10640 cellSch->dl.emtcCqiToTbsTbl[1][cfi] = rgSchEmtcCmnCqiToTbs[1][cp][cfiIdx];
10642 cellSch->dl.cqiToEffTbl[1][cfi] = rgSchCmnEffTbl[1][cp][rgSchCmnAntIdx\
10643 [cell->numTxAntPorts]][cfiIdx];
10646 /* Initializing the values of CFI parameters */
10647 if(cell->dynCfiCb.isDynCfiEnb)
10649 /* If DCFI is enabled, current CFI value will start from 1 */
10650 cellSch->dl.currCfi = cellSch->dl.newCfi = 1;
10654 /* If DCFI is disabled, current CFI value is set as default CFI value */
10655 cellSch->dl.currCfi = cellSch->cfiCfg.cfi;
10656 cellSch->dl.newCfi = cellSch->dl.currCfi;
10659 /* Include CRS REs while calculating Efficiency
10660 * The number of Resource Elements occupied by CRS depends on Number of
10661 * Antenna Ports. Please refer to Section 6.10.1 of 3GPP TS 36.211 V8.8.0.
10662 * Also, please refer to Figures 6.10.1.2-1 and 6.10.1.2-2 for diagrammatic
10663 * details of the same. Please note that PDCCH overlap symbols would not be
10664 * considered in CRS REs deduction */
10665 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, numPdcchSym++)
10667 cellSch->dl.noResPerRb[cfi] = (((noSymPerSlot * RG_SCH_CMN_NUM_SLOTS_PER_SF)
10668 - numPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - rgSchCmnNumResForCrs[cell->numTxAntPorts];
10671 if (cfg->maxDlBwPerUe == 0)
10673 cellSch->dl.maxDlBwPerUe = RG_SCH_CMN_MAX_DL_BW_PERUE;
10677 cellSch->dl.maxDlBwPerUe = cfg->maxDlBwPerUe;
10679 if (cfg->maxDlRetxBw == 0)
10681 cellSch->dl.maxDlRetxBw = RG_SCH_CMN_MAX_DL_RETX_BW;
10685 cellSch->dl.maxDlRetxBw = cfg->maxDlRetxBw;
10688 /* Fix: MUE_PERTTI_DL*/
10689 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10690 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10691 if (cfg->maxUePerDlSf == 0)
10693 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10695 /* Fix: MUE_PERTTI_DL syed validating Cell Configuration */
10696 if (cellSch->dl.maxUePerDlSf < cellSch->dl.maxUeNewTxPerTti)
10698 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
10699 "FAILED MaxUePerDlSf(%u) < MaxDlUeNewTxPerTti(%u)",
10700 cellSch->dl.maxUePerDlSf,
10701 cellSch->dl.maxUeNewTxPerTti);
10704 /*[ccpu00138609]-ADD- Configure the Max CCCH Counter */
10705 if (cfg->maxCcchPerDlSf > cfg->maxUePerDlSf)
10707 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid configuration !: "
10708 "maxCcchPerDlSf %u > maxUePerDlSf %u",
10709 cfg->maxCcchPerDlSf, cfg->maxUePerDlSf );
10713 else if (!cfg->maxCcchPerDlSf)
10715       /* ccpu00143032: maxCcchPerDlSf 0 means it is not configured by the application,
10716        * hence setting it to maxUePerDlSf. If maxCcchPerDlSf is 0, the scheduler
10717        * does not consider CCCH allocation in the MaxUePerTti cap. More than 4 UEs
10718        * could then get scheduled and SCH would expect more than 16 HARQ PDUs in a TTI,
10719        * which causes an FLE crash in PHY as PHY has a limit of 16 max */
10720 cellSch->dl.maxCcchPerDlSf = cfg->maxUePerDlSf;
10724 cellSch->dl.maxCcchPerDlSf = cfg->maxCcchPerDlSf;
10728 if (rgSCHCmnDlCnsdrCmnRt(cell, &cfg->dlCmnCodeRate) != ROK)
10732 cmLListInit(&cellSch->dl.msg4RetxLst);
10734 cmLListInit(&cellSch->dl.ccchSduRetxLst);
10737 #ifdef RG_PHASE2_SCHED
10738 if (cellSch->apisDlfs == NULLP) /* DFLS specific initialization */
10740 cellSch->apisDlfs = &rgSchDlfsSchdTbl[cfg->dlfsSchdType];
10742 if (cfg->dlfsCfg.isDlFreqSel)
10744 ret = cellSch->apisDlfs->rgSCHDlfsCellCfg(cell, cfg, err);
10750 cellSch->dl.isDlFreqSel = cfg->dlfsCfg.isDlFreqSel;
10753 /* Power related configuration */
10754 ret = rgSCHPwrCellCfg(cell, cfg);
10760 cellSch->dl.bcchTxPwrOffset = cfg->bcchTxPwrOffset;
10761 cellSch->dl.pcchTxPwrOffset = cfg->pcchTxPwrOffset;
10762 cellSch->dl.rarTxPwrOffset = cfg->rarTxPwrOffset;
10763 cellSch->dl.phichTxPwrOffset = cfg->phichTxPwrOffset;
10764 RG_SCH_RESET_HCSG_DL_PRB_CNTR(&cellSch->dl);
10767 #endif /* LTE_TDD */
10769 /***********************************************************
10771 * Func : rgSCHCmnUlCalcReqRbCeil
10773 * Desc : Calculate RB required to satisfy 'bytes' for
10775 * Returns number of RBs such that requirement
10776 * is necessarily satisfied (does a 'ceiling'
10779 * Ret : Required RBs (U8)
10785 **********************************************************/
10787 PUBLIC U8 rgSCHCmnUlCalcReqRbCeil
10791 RgSchCmnUlCell *cellUl
10794 PUBLIC U8 rgSCHCmnUlCalcReqRbCeil(bytes, cqi, cellUl)
10797 RgSchCmnUlCell *cellUl;
10800 U32 numRe = RGSCH_CEIL((bytes * 8) * 1024, rgSchCmnUlCqiTbl[cqi].eff);
10801 TRC2(rgSCHCmnUlCalcReqRbCeil);
10802 RETVALUE((U8)RGSCH_CEIL(numRe, RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl)));
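/* Illustrative example (assumed numbers): the ceiling is applied twice, first
 * at RE level and then at RB level. For bytes = 100 (800 bits), an efficiency
 * entry of 1024 (1 bit per RE in Q10 scaling) and 144 REs per RB,
 * numRe = RGSCH_CEIL(800 * 1024, 1024) = 800 and the function returns
 * RGSCH_CEIL(800, 144) = 6 RBs. */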
10805 /***********************************************************
10807 * Func : rgSCHCmnPrecompMsg3Vars
10809 * Desc : Precomputes the following for msg3 allocation:
10810 * 1. numSb and Imcs for msg size A
10811 * 2. numSb and Imcs otherwise
10815 *        Notes: The corresponding vars in the cellUl struct are filled
10820 **********************************************************/
10822 PRIVATE S16 rgSCHCmnPrecompMsg3Vars
10824 RgSchCmnUlCell *cellUl,
10831 PRIVATE S16 rgSCHCmnPrecompMsg3Vars(cellUl, ccchCqi, msgSzA, sbSize, isEcp)
10832 RgSchCmnUlCell *cellUl;
10844 U16 msg3GrntSz = 0;
10846 TRC2(rgSCHCmnPrecompMsg3Vars);
10848 if (ccchCqi > cellUl->max16qamCqi)
10850 ccchCqi = cellUl->max16qamCqi;
10852 /* #ifndef RG_SCH_CMN_EXP_CP_SUP For ECP Pick the index 1 */
10854 ccchTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi];
10855 ccchMcs = rgSCHCmnUlGetIMcsFrmITbs(ccchTbs, CM_LTE_UE_CAT_1);
10857 /* MCS should fit in 4 bits in RAR */
10863 /* Limit the ccchMcs to 15 as it
10864 * can be inferred from 36.213, section 6.2 that msg3 imcs
10866    * Since the UE does not exist yet, we use CAT_1 for the UE
10868 while((ccchMcs = (rgSCHCmnUlGetIMcsFrmITbs(
10869 rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi],CM_LTE_UE_CAT_1))
10871 RG_SCH_CMN_MAX_MSG3_IMCS)
10876 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi];
10878 if (msgSzA < RGSCH_MIN_MSG3_GRNT_SZ)
10882 numSb = RGSCH_CEIL(rgSCHCmnUlCalcReqRbCeil(msgSzA, ccchCqi, cellUl), sbSize);
10884 numRb = numSb * sbSize;
10885 msg3GrntSz = 8 * msgSzA;
10887 while( (rgTbSzTbl[0][iTbs][numRb - 1]) < msg3GrntSz)
10890 numRb = numSb * sbSize;
10892 while (rgSchCmnMult235Tbl[numSb].match != numSb)
10896   /* Reversed (corrected) the assignment for preamble-GrpA.
10897    * Refer to TS 36.321, section 5.1.2 */
10898 cellUl->ra.prmblBNumSb = numSb;
10899 cellUl->ra.prmblBIMcs = ccchMcs;
10900 numSb = RGSCH_CEIL(rgSCHCmnUlCalcReqRbCeil(RGSCH_MIN_MSG3_GRNT_SZ, \
10904 numRb = numSb * sbSize;
10905 msg3GrntSz = 8 * RGSCH_MIN_MSG3_GRNT_SZ;
10906 while( (rgTbSzTbl[0][iTbs][numRb - 1]) < msg3GrntSz)
10909 numRb = numSb * sbSize;
10911 while (rgSchCmnMult235Tbl[numSb].match != numSb)
10915   /* Reversed (corrected) the assignment for preamble-GrpA.
10916    * Refer to TS 36.321, section 5.1.2 */
10917 cellUl->ra.prmblANumSb = numSb;
10918 cellUl->ra.prmblAIMcs = ccchMcs;
10922 PUBLIC U32 gPrntPucchDet=0;
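/* Illustrative sketch (hypothetical helper, not referenced anywhere in this
 * file): the msg3 precomputation above walks rgSchCmnMult235Tbl[] so that the
 * chosen subband count appears to satisfy the LTE UL constraint that an
 * allocation size factorise into powers of 2, 3 and 5. A direct check of that
 * property could look like this: */
static Bool rgSCHCmnIs235Smooth(U16 n)
{
   if (n == 0)
   {
      return (FALSE);
   }
   while ((n % 2) == 0) n /= 2; /* strip factors of 2 */
   while ((n % 3) == 0) n /= 3; /* strip factors of 3 */
   while ((n % 5) == 0) n /= 5; /* strip factors of 5 */
   return (n == 1);             /* only 2/3/5-smooth sizes reduce to 1 */
}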
10925 /***********************************************************
10927 * Func : rgSCHCmnUlCalcAvailBw
10929 * Desc : Calculates bandwidth available for PUSCH scheduling.
10931 * Ret : S16 (ROK/RFAILED)
10937 **********************************************************/
10939 PRIVATE S16 rgSCHCmnUlCalcAvailBw
10942 RgrCellCfg *cellCfg,
10948 PRIVATE S16 rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, rbStartRef, bwAvailRef)
10950 RgrCellCfg *cellCfg;
10957 U8 ulBw = cell->bwCfg.ulTotalBw;
10958 U8 n2Rb = cell->pucchCfg.resourceSize;
10959 U8 pucchDeltaShft = cell->pucchCfg.deltaShift;
10960 U16 n1Pucch = cell->pucchCfg.n1PucchAn;
10961 U8 n1Cs = cell->pucchCfg.cyclicShift;
10968 U8 exclRb; /* RBs to exclude */
10971 /* To avoid PUCCH and PUSCH collision issue */
10975 /* Maximum value of M as per Table 10.1-1 */
10976 U8 M[RGSCH_MAX_TDD_UL_DL_CFG] = {1, 2, 4, 3, 4, 9, 1};
10978 TRC2(rgSCHCmnUlCalcAvailBw);
10980 if (cell->isCpUlExtend)
10985 n1PerRb = c * 12 / pucchDeltaShft; /* 12/18/36 */
10987 /* Considering the max no. of CCEs for PUSCH BW calculation
10988 * based on min mi value */
10989 if (cell->ulDlCfgIdx == 0 || cell->ulDlCfgIdx == 6)
10998 totalCce = cell->dynCfiCb.cfi2NCceTbl[mi][cfi];
11000 P = rgSCHCmnGetPValFrmCCE(cell, totalCce-1);
11001 n1PlusOne = cell->rgSchTddNpValTbl[P + 1];
11002 n1Max = (M[cell->ulDlCfgIdx] - 1)*n1PlusOne + (totalCce-1) + n1Pucch;
11004 /* ccpu00129978- MOD- excluding RBs based on formula in section 5.4.3 in
11006 n1RbPart = (c*n1Cs)/pucchDeltaShft;
11007 n1Rb = (n1Max - n1RbPart)/ n1PerRb;
11008 mixedRb = RGSCH_CEIL(n1Cs, 8); /* same as 'mixedRb = n1Cs ? 1 : 0' */
11010 /* get the total Number of RB's to be excluded for PUSCH */
11012 if(n1Pucch < n1RbPart)
11018 exclRb = n2Rb + mixedRb + n1Rb; /* RBs to exclude */
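   /* Illustrative example (assumed numbers): if n2Rb = 2, mixedRb = 1 and
    * n1Rb = 3, then exclRb = 6 and puschRbStart = 6/2 + 1 = 4, i.e. 4 RBs at
    * each band edge are left for PUCCH and PUSCH may only use the RBs in
    * between. */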
11020 puschRbStart = exclRb/2 + 1;
11022 /* Num of PUCCH RBs = puschRbStart*2 */
11023 if (puschRbStart * 2 >= ulBw)
11025 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"No bw available for PUSCH");
11029 *rbStartRef = puschRbStart;
11030 *bwAvailRef = ulBw - puschRbStart * 2;
11032 if(cell->pucchCfg.maxPucchRb !=0 &&
11033 (puschRbStart * 2 > cell->pucchCfg.maxPucchRb))
11035 cell->dynCfiCb.maxCfi = RGSCH_MIN(cfi-1, cell->dynCfiCb.maxCfi);
11042 /***********************************************************
11044 * Func : rgSCHCmnUlCalcAvailBw
11046 * Desc : Calculates bandwidth available for PUSCH scheduling.
11048 * Ret : S16 (ROK/RFAILED)
11054 **********************************************************/
11056 PRIVATE S16 rgSCHCmnUlCalcAvailBw
11059 RgrCellCfg *cellCfg,
11065 PRIVATE S16 rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, rbStartRef, bwAvailRef)
11067 RgrCellCfg *cellCfg;
11074 U8 ulBw = cell->bwCfg.ulTotalBw;
11075 U8 n2Rb = cell->pucchCfg.resourceSize;
11076 U8 pucchDeltaShft = cell->pucchCfg.deltaShift;
11077 U16 n1Pucch = cell->pucchCfg.n1PucchAn;
11078 U8 n1Cs = cell->pucchCfg.cyclicShift;
11084 U8 exclRb; /* RBs to exclude */
11088 U16 numOfN3PucchRb;
11089 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11092 TRC2(rgSCHCmnUlCalcAvailBw);
11094 if (cell->isCpUlExtend)
11099 n1PerRb = c * 12 / pucchDeltaShft; /* 12/18/36 */
11101 totalCce = cell->dynCfiCb.cfi2NCceTbl[0][cfi];
11103 n1Max = n1Pucch + totalCce-1;
11105 /* ccpu00129978- MOD- excluding RBs based on formula in section 5.4.3 in
11107 n1RbPart = (c*n1Cs)/pucchDeltaShft;
11108 n1Rb = (U8)((n1Max - n1RbPart) / n1PerRb);
11109 mixedRb = RGSCH_CEIL(n1Cs, 8); /* same as 'mixedRb = n1Cs ? 1 : 0' */
11111 /* get the total Number of RB's to be excluded for PUSCH */
11113 if(n1Pucch < n1RbPart)
11119 exclRb = n2Rb + mixedRb + n1Rb; /* RBs to exclude */
11121 /*Support for PUCCH Format 3*/
11123 if (cell->isPucchFormat3Sptd)
11125 numOfN3PucchRb = RGSCH_CEIL(cellSch->dl.maxUePerDlSf,5);
11126 exclRb = exclRb + numOfN3PucchRb;
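   /* Illustrative example: the reservation above assumes roughly 5 UEs
    * multiplexed per PUCCH format 3 RB (the divisor used in the code above),
    * so for example maxUePerDlSf = 16 would add RGSCH_CEIL(16, 5) = 4 RBs to
    * the excluded region. */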
11129 puschRbStart = exclRb/2 + 1;
11133 #ifndef ALIGN_64BIT
11134 printf("CA_DBG:: puschRbStart:n1Rb:mixedRb:n1PerRb:totalCce:n1Max:n1RbPart:n2Rb::[%d:%d] [%d:%d:%ld:%d:%d:%d:%d:%d]\n",
11135 cell->crntTime.sfn, cell->crntTime.slot, puschRbStart, n1Rb, mixedRb,n1PerRb, totalCce, n1Max, n1RbPart, n2Rb);
11137 printf("CA_DBG:: puschRbStart:n1Rb:mixedRb:n1PerRb:totalCce:n1Max:n1RbPart:n2Rb::[%d:%d] [%d:%d:%d:%d:%d:%d:%d:%d]\n",
11138 cell->crntTime.sfn, cell->crntTime.slot, puschRbStart, n1Rb, mixedRb,n1PerRb, totalCce, n1Max, n1RbPart, n2Rb);
11142 if (puschRbStart*2 >= ulBw)
11144 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"No bw available for PUSCH");
11148 *rbStartRef = puschRbStart;
11149 *bwAvailRef = ulBw - puschRbStart * 2;
11151 if(cell->pucchCfg.maxPucchRb !=0 &&
11152 (puschRbStart * 2 > cell->pucchCfg.maxPucchRb))
11154 cell->dynCfiCb.maxCfi = RGSCH_MIN(cfi-1, cell->dynCfiCb.maxCfi);
11163 /***********************************************************
11165 * Func : rgSCHCmnUlCellInit
11167 * Desc : Uplink scheduler initialisation for cell.
11175 **********************************************************/
11177 PRIVATE S16 rgSCHCmnUlCellInit
11180 RgrCellCfg *cellCfg
11183 PRIVATE S16 rgSCHCmnUlCellInit(cell, cellCfg)
11185 RgrCellCfg *cellCfg;
11189 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11190 U8 maxUePerUlSf = cellCfg->maxUePerUlSf;
11192 /* Added configuration for maximum number of MSG3s */
11193 U8 maxMsg3PerUlSf = cellCfg->maxMsg3PerUlSf;
11195 U8 maxUlBwPerUe = cellCfg->maxUlBwPerUe;
11196 U8 sbSize = cellCfg->puschSubBand.size;
11204 U16 ulDlCfgIdx = cell->ulDlCfgIdx;
11205 /* [ccpu00127294]-MOD-Change the max Ul subfrms size in TDD */
11206 U8 maxSubfrms = 2 * rgSchTddNumUlSf[ulDlCfgIdx];
11207 U8 ulToDlMap[12] = {0}; /* maximum 6 Subframes in UL * 2 */
11208 U8 maxUlsubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
11209 [RGSCH_NUM_SUB_FRAMES-1];
11213 U8 maxSubfrms = RG_SCH_CMN_UL_NUM_SF;
11219 #if (defined(LTE_L2_MEAS) )
11220 Inst inst = cell->instIdx;
11221 #endif /* LTE_L2_MEAS */
11222 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
11224 TRC2(rgSCHCmnUlCellInit);
11226 cellUl->maxUeNewTxPerTti = cellCfg->maxUlUeNewTxPerTti;
11227 if (maxUePerUlSf == 0)
11229 maxUePerUlSf = RG_SCH_CMN_MAX_UE_PER_UL_SF;
11232 if (maxMsg3PerUlSf == 0)
11234 maxMsg3PerUlSf = RG_SCH_CMN_MAX_MSG3_PER_UL_SF;
11236    /* Fixed a problem while sending raRsp
11237     * when maxMsg3PerUlSf is greater than
11238     * RGSCH_MAX_RNTI_PER_RARNTI
11240 if(maxMsg3PerUlSf > RGSCH_MAX_RNTI_PER_RARNTI)
11242 maxMsg3PerUlSf = RGSCH_MAX_RNTI_PER_RARNTI;
11245 if(maxMsg3PerUlSf > maxUePerUlSf)
11247 maxMsg3PerUlSf = maxUePerUlSf;
11250 /*cellUl->maxAllocPerUlSf = maxUePerUlSf + maxMsg3PerUlSf;*/
11251 /*Max MSG3 should be a subset of Max UEs*/
11252 cellUl->maxAllocPerUlSf = maxUePerUlSf;
11253 cellUl->maxMsg3PerUlSf = maxMsg3PerUlSf;
11255 cellUl->maxAllocPerUlSf = maxUePerUlSf;
11257 /* Fix: MUE_PERTTI_UL syed validating Cell Configuration */
11258 if (cellUl->maxAllocPerUlSf < cellUl->maxUeNewTxPerTti)
11260 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
11261 "FAILED: MaxUePerUlSf(%u) < MaxUlUeNewTxPerTti(%u)",
11262 cellUl->maxAllocPerUlSf,
11263 cellUl->maxUeNewTxPerTti);
11269 for(idx = 0; idx < RGSCH_SF_ALLOC_SIZE; idx++)
11271 for(idx = 0; idx < RGSCH_NUM_SUB_FRAMES; idx++)
11275 ret = rgSCHUtlAllocSBuf(inst, (Data **)&(cell->sfAllocArr[idx].
11276 ulUeInfo.ulAllocInfo), (cellUl->maxAllocPerUlSf * sizeof(RgInfUeUlAlloc)));
11279 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"Memory allocation failed ");
11284 if (maxUlBwPerUe == 0)
11286 /* ccpu00139362- Setting to configured UL BW instead of MAX BW(100)*/
11287 maxUlBwPerUe = cell->bwCfg.ulTotalBw;
11289 cellUl->maxUlBwPerUe = maxUlBwPerUe;
11291 /* FOR RG_SCH_CMN_EXT_CP_SUP */
11292 if (!cellCfg->isCpUlExtend)
11294 cellUl->ulNumRePerRb = 12 * (14 - RGSCH_UL_SYM_DMRS_SRS);
11298 cellUl->ulNumRePerRb = 12 * (12 - RGSCH_UL_SYM_DMRS_SRS);
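   /* Illustrative example (the DMRS/SRS symbol count is an assumption): with a
    * normal CP there are 14 SC-FDMA symbols per subframe, with an extended CP
    * there are 12. Assuming RGSCH_UL_SYM_DMRS_SRS = 3 symbols taken by
    * DMRS/SRS, this gives 12 * (14 - 3) = 132 REs per RB for normal CP and
    * 12 * (12 - 3) = 108 REs per RB for extended CP. */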
11301 if (sbSize != rgSchCmnMult235Tbl[sbSize].match)
11303 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Invalid subband size %d", sbSize);
11306 //Setting the subband size to 4 which is size of VRBG in 5GTF
11308 sbSize = MAX_5GTF_VRBG_SIZE;
11311 maxSbPerUe = maxUlBwPerUe / sbSize;
11312 if (maxSbPerUe == 0)
11314 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnUlCellInit(): "
11315 "maxUlBwPerUe/sbSize is zero");
11318 cellUl->maxSbPerUe = rgSchCmnMult235Tbl[maxSbPerUe].prvMatch;
11320 /* CQI related updations */
11321 if ((!RG_SCH_CMN_UL_IS_CQI_VALID(cellCfg->ulCmnCodeRate.ccchCqi))
11322 || (!RG_SCH_CMN_UL_IS_CQI_VALID(cellCfg->trgUlCqi.trgCqi)))
11324 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnUlCellInit(): "
11328 cellUl->dfltUlCqi = cellCfg->ulCmnCodeRate.ccchCqi;
11330 /* Changed the logic to determine maxUlCqi.
11331 * For a 16qam UE, maxUlCqi is the CQI Index at which
11332 * efficiency is as close as possible to RG_SCH_MAX_CODE_RATE_16QAM
11333 * Refer to 36.213-8.6.1 */
11334 for (i = RG_SCH_CMN_UL_NUM_CQI - 1;i > 0; --i)
11336 RLOG_ARG2(L_INFO,DBG_CELLID,cell->cellId,
11339 rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i]);
11340 #ifdef MAC_SCH_STATS
11341 /* ccpu00128489 ADD Update mcs in hqFailStats here instead of at CRC
11342 * since CQI to MCS mapping does not change. The only exception is for
11343 * ITBS = 19 where the MCS can be 20 or 21 based on the UE cat. We
11344 * choose 20, instead of 21, ie UE_CAT_3 */
11345 iTbs = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i];
11346 RG_SCH_CMN_UL_TBS_TO_MCS(iTbs, hqFailStats.ulCqiStat[i - 1].mcs);
11349 for (i = RG_SCH_CMN_UL_NUM_CQI - 1; i != 0; --i)
11351 /* Fix for ccpu00123912*/
11352 iTbs = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i];
11353 if (iTbs <= RGSCH_UL_16QAM_MAX_ITBS) /* corresponds to 16QAM */
11355 RLOG_ARG1(L_INFO,DBG_CELLID,cell->cellId,
11356 "16 QAM CQI %u", i);
11357 cellUl->max16qamCqi = i;
11363 /* Precompute useful values for RA msg3 */
11364 ret = rgSCHCmnPrecompEmtcMsg3Vars(cellUl, cellCfg->ulCmnCodeRate.ccchCqi,
11365 cell->rachCfg.msgSizeGrpA, sbSize, cell->isCpUlExtend);
11372 /* Precompute useful values for RA msg3 */
11373 ret = rgSCHCmnPrecompMsg3Vars(cellUl, cellCfg->ulCmnCodeRate.ccchCqi,
11374 cell->rachCfg.msgSizeGrpA, sbSize, cell->isCpUlExtend);
11380 cellUl->sbSize = sbSize;
11383 cellUl->numUlSubfrms = maxSubfrms;
11385 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cellUl->ulSfArr,
11386 cellUl->numUlSubfrms * sizeof(RgSchUlSf));
11390 cellUl->numUlSubfrms = 0;
11394 /* store the DL subframe corresponding to the PUSCH offset
11395 * in their respective UL subframe */
11396 for(i=0; i < RGSCH_NUM_SUB_FRAMES; i++)
11398 if(rgSchTddPuschTxKTbl[ulDlCfgIdx][i] != 0)
11400 subfrm = (i + rgSchTddPuschTxKTbl[ulDlCfgIdx][i]) % \
11401 RGSCH_NUM_SUB_FRAMES;
11402 subfrm = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][subfrm]-1;
11403 dlIdx = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][i]-1;
11404 RGSCH_ARRAY_BOUND_CHECK( cell->instIdx, ulToDlMap, subfrm);
11405 ulToDlMap[subfrm] = dlIdx;
11408 /* Copy the information in the remaining UL subframes based
11409 * on number of HARQ processes */
11410 for(i=maxUlsubfrms; i < maxSubfrms; i++)
11412 subfrm = i-maxUlsubfrms;
11413 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, ulToDlMap, i);
11414 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, ulToDlMap, subfrm)
11415 ulToDlMap[i] = ulToDlMap[subfrm];
11419 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++)
11422 ret = rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, &rbStart, &bwAvail);
11424 ret = rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, &rbStart, &bwAvail);
11433 cell->ulAvailBw = bwAvail;
11436 numSb = bwAvail/sbSize;
11438 cell->dynCfiCb.bwInfo[cfi].startRb = rbStart;
11439 cell->dynCfiCb.bwInfo[cfi].numSb = numSb;
11442 if(0 == cell->dynCfiCb.maxCfi)
11444 RLOG_ARG3(L_ERROR,DBG_CELLID,cell->cellId,
11445 "Incorrect Default CFI(%u), maxCfi(%u), maxPucchRb(%d)",
11446 cellSch->cfiCfg.cfi, cell->dynCfiCb.maxCfi,
11447 cell->pucchCfg.maxPucchRb);
11453 cellUl->dmrsArrSize = cell->dynCfiCb.bwInfo[1].numSb;
11454 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cellUl->dmrsArr,
11455 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11460 for (i = 0; i < cellUl->dmrsArrSize; ++i)
11462 cellUl->dmrsArr[i] = cellCfg->puschSubBand.dmrs[i];
11465 /* Init subframes */
11466 for (i = 0; i < maxSubfrms; ++i)
11468 ret = rgSCHUtlUlSfInit(cell, &cellUl->ulSfArr[i], i,
11469 cellUl->maxAllocPerUlSf);
11472 for (; i != 0; --i)
11474 rgSCHUtlUlSfDeinit(cell, &cellUl->ulSfArr[i-1]);
11476 /* ccpu00117052 - MOD - Passing double pointer
11477 for proper NULLP assignment*/
11478 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)(&(cellUl->dmrsArr)),
11479 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11481 /* ccpu00117052 - MOD - Passing double pointer
11482 for proper NULLP assignment*/
11483 rgSCHUtlFreeSBuf(cell->instIdx,
11484 (Data **)(&(cellUl->ulSfArr)), maxSubfrms * sizeof(RgSchUlSf));
11489 RG_SCH_RESET_HCSG_UL_PRB_CNTR(cellUl);
11494 * @brief Scheduler processing on cell configuration.
11498 * Function : rgSCHCmnRgrCellCfg
11500 * This function does requisite initialisation
11501 * and setup for scheduler1 when a cell is
11504 * @param[in] RgSchCellCb *cell
11505 * @param[in] RgrCellCfg *cellCfg
11506 * @param[out] RgSchErrInfo *err
11512 PUBLIC S16 rgSCHCmnRgrCellCfg
11515 RgrCellCfg *cellCfg,
11519 PUBLIC S16 rgSCHCmnRgrCellCfg(cell, cellCfg, err)
11521 RgrCellCfg *cellCfg;
11526 RgSchCmnCell *cellSch;
11527 TRC2(rgSCHCmnRgrCellCfg);
11529 /* As part of RGR cell configuration, validate the CRGCellCfg
11530 * There is no trigger for crgCellCfg from SC1 */
11531 /* Removed failure check for Extended CP */
11533 if (((ret = rgSCHUtlAllocSBuf(cell->instIdx,
11534 (Data**)&(cell->sc.sch), (sizeof(RgSchCmnCell)))) != ROK))
11536 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
11537 "Memory allocation FAILED");
11538 err->errCause = RGSCHERR_SCH_CFG;
11541 cellSch = (RgSchCmnCell *)(cell->sc.sch);
11542 cellSch->cfiCfg = cellCfg->cfiCfg;
11543 cellSch->trgUlCqi.trgCqi = cellCfg->trgUlCqi.trgCqi;
11544 /* Initialize the scheduler refresh timer queues */
11545 cellSch->tmrTqCp.nxtEnt = 0;
11546 cellSch->tmrTqCp.tmrLen = RG_SCH_CMN_NUM_REFRESH_Q;
11548    /* RACHO: Initialize the RACH dedicated Preamble Information */
11549 rgSCHCmnCfgRachDedPrm(cell);
11551 /* Initialize 'Np' value for each 'p' used for
11552 * HARQ ACK/NACK reception */
11553 rgSCHCmnDlNpValInit(cell);
11556 /* Initialize 'Np' value for each 'p' used for
11557 * HARQ ACK/NACK reception */
11559 rgSCHCmnDlNpValInit(cell);
11562 /* Now perform uplink related initializations */
11563 ret = rgSCHCmnUlCellInit(cell, cellCfg);
11566 /* There is no downlink deinit to be performed */
11567 err->errCause = RGSCHERR_SCH_CFG;
11570 ret = rgSCHCmnDlRgrCellCfg(cell, cellCfg, err);
11573 err->errCause = RGSCHERR_SCH_CFG;
11576 /* DL scheduler has no initializations to make */
11577 /* As of now DL scheduler always returns ROK */
11579 rgSCHCmnGetDciFrmtSizes(cell);
11580 rgSCHCmnGetCqiDciFrmt2AggrLvl(cell);
11582 rgSCHCmnGetEmtcDciFrmtSizes(cell);
11583 rgSCHCmnGetCqiEmtcDciFrmt2AggrLvl(cell);
11584 #endif /* EMTC_ENABLE */
11587 if(TRUE == cellCfg->emtcEnable)
11589 cellSch->apisEmtcUl = &rgSchEmtcUlSchdTbl[0];
11590 ret = cellSch->apisEmtcUl->rgSCHRgrUlCellCfg(cell, cellCfg, err);
11597 cellSch->apisUl = &rgSchUlSchdTbl[RG_SCH_CMN_GET_UL_SCHED_TYPE(cell)];
11598 ret = cellSch->apisUl->rgSCHRgrUlCellCfg(cell, cellCfg, err);
11604 if(TRUE == cellCfg->emtcEnable)
11606 cellSch->apisEmtcDl = &rgSchEmtcDlSchdTbl[0];
11607 ret = cellSch->apisEmtcDl->rgSCHRgrDlCellCfg(cell, cellCfg, err);
11614 cellSch->apisDl = &rgSchDlSchdTbl[RG_SCH_CMN_GET_DL_SCHED_TYPE(cell)];
11616 /* Perform SPS specific initialization for the cell */
11617 ret = rgSCHCmnSpsCellCfg(cell, cellCfg, err);
11623 ret = cellSch->apisDl->rgSCHRgrDlCellCfg(cell, cellCfg, err);
11628 rgSCHCmnInitVars(cell);
11631 } /* rgSCHCmnRgrCellCfg*/
11635 * @brief This function handles the reconfiguration of cell.
11639 * Function: rgSCHCmnRgrCellRecfg
11640 * Purpose: Update the reconfiguration parameters.
11642 * Invoked by: Scheduler
11644 * @param[in] RgSchCellCb* cell
11649 PUBLIC S16 rgSCHCmnRgrCellRecfg
11652 RgrCellRecfg *recfg,
11656 PUBLIC S16 rgSCHCmnRgrCellRecfg(cell, recfg, err)
11658 RgrCellRecfg *recfg;
11663 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11664 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11666 TRC2(rgSCHCmnRgrCellRecfg);
11668 if (recfg->recfgTypes & RGR_CELL_UL_CMNRATE_RECFG)
11670 U8 oldCqi = cellUl->dfltUlCqi;
11671 if (!RG_SCH_CMN_UL_IS_CQI_VALID(recfg->ulCmnCodeRate.ccchCqi))
11673 err->errCause = RGSCHERR_SCH_CFG;
11674 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnRgrCellRecfg(): "
11678 cellUl->dfltUlCqi = recfg->ulCmnCodeRate.ccchCqi;
11679 ret = rgSCHCmnPrecompMsg3Vars(cellUl, recfg->ulCmnCodeRate.ccchCqi,
11680 cell->rachCfg.msgSizeGrpA, cellUl->sbSize, cell->isCpUlExtend);
11683 cellUl->dfltUlCqi = oldCqi;
11684 rgSCHCmnPrecompMsg3Vars(cellUl, recfg->ulCmnCodeRate.ccchCqi,
11685 cell->rachCfg.msgSizeGrpA, cellUl->sbSize, cell->isCpUlExtend);
11690 if (recfg->recfgTypes & RGR_CELL_DL_CMNRATE_RECFG)
11692 if (rgSCHCmnDlCnsdrCmnRt(cell, &recfg->dlCmnCodeRate) != ROK)
11694 err->errCause = RGSCHERR_SCH_CFG;
11700 if(TRUE == cell->emtcEnable)
11702 /* Invoke UL sched for cell Recfg */
11703 ret = cellSch->apisEmtcUl->rgSCHRgrUlCellRecfg(cell, recfg, err);
11709 /* Invoke DL sched for cell Recfg */
11710 ret = cellSch->apisEmtcDl->rgSCHRgrDlCellRecfg(cell, recfg, err);
11719 /* Invoke UL sched for cell Recfg */
11720 ret = cellSch->apisUl->rgSCHRgrUlCellRecfg(cell, recfg, err);
11726 /* Invoke DL sched for cell Recfg */
11727 ret = cellSch->apisDl->rgSCHRgrDlCellRecfg(cell, recfg, err);
11734 if (recfg->recfgTypes & RGR_CELL_DLFS_RECFG)
11736 ret = cellSch->apisDlfs->rgSCHDlfsCellRecfg(cell, recfg, err);
11741 cellSch->dl.isDlFreqSel = recfg->dlfsRecfg.isDlFreqSel;
11744 if (recfg->recfgTypes & RGR_CELL_PWR_RECFG)
11746 ret = rgSCHPwrCellRecfg(cell, recfg);
11756 /***********************************************************
11758 * Func : rgSCHCmnUlCellDeinit
11760 * Desc : Uplink scheduler de-initialisation for cell.
11768 **********************************************************/
11770 PRIVATE Void rgSCHCmnUlCellDeinit
11775 PRIVATE Void rgSCHCmnUlCellDeinit(cell)
11779 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11782 U8 maxSubfrms = cellUl->numUlSubfrms;
11785 CmLList *lnk = NULLP;
11786 RgSchL2MeasCb *measCb;
11788 TRC2(rgSCHCmnUlCellDeinit);
11791 for(ulSfIdx = 0; ulSfIdx < RGSCH_SF_ALLOC_SIZE; ulSfIdx++)
11793 for(ulSfIdx = 0; ulSfIdx < RGSCH_NUM_SUB_FRAMES; ulSfIdx++)
11796 if(cell->sfAllocArr[ulSfIdx].ulUeInfo.ulAllocInfo != NULLP)
11798 /* ccpu00117052 - MOD - Passing double pointer
11799 for proper NULLP assignment*/
11800 rgSCHUtlFreeSBuf(cell->instIdx,
11801 (Data **)(&(cell->sfAllocArr[ulSfIdx].ulUeInfo.ulAllocInfo)),
11802 cellUl->maxAllocPerUlSf * sizeof(RgInfUeUlAlloc));
11804 /* ccpu00117052 - DEL - removed explicit NULLP assignment
11805 as it is done in above utility function */
11808 /* Free the memory allocated to measCb */
11809 lnk = cell->l2mList.first;
11810 while(lnk != NULLP)
11812 measCb = (RgSchL2MeasCb *)lnk->node;
11813 cmLListDelFrm(&cell->l2mList, lnk);
11815 /* ccpu00117052 - MOD - Passing double pointer
11816 for proper NULLP assignment*/
11817 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&measCb,\
11818 sizeof(RgSchL2MeasCb));
11821 if (cellUl->dmrsArr != NULLP)
11823 /* ccpu00117052 - MOD - Passing double pointer
11824 for proper NULLP assignment*/
11825 rgSCHUtlFreeSBuf(cell->instIdx,(Data **)(&(cellUl->dmrsArr)),
11826 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11828 /* De-init subframes */
11830 for (ulSfIdx = 0; ulSfIdx < maxSubfrms; ++ulSfIdx)
11832 for (ulSfIdx = 0; ulSfIdx < RG_SCH_CMN_UL_NUM_SF; ++ulSfIdx)
11835 rgSCHUtlUlSfDeinit(cell, &cellUl->ulSfArr[ulSfIdx]);
11839 if (cellUl->ulSfArr != NULLP)
11841 /* ccpu00117052 - MOD - Passing double pointer
11842 for proper NULLP assignment*/
11843 rgSCHUtlFreeSBuf(cell->instIdx,
11844 (Data **)(&(cellUl->ulSfArr)), maxSubfrms * sizeof(RgSchUlSf));
11852 * @brief Scheduler processing for cell delete.
11856 * Function : rgSCHCmnCellDel
11858 * This functions de-initialises and frees memory
11859 * taken up by scheduler1 for the entire cell.
11861 * @param[in] RgSchCellCb *cell
11865 PUBLIC Void rgSCHCmnCellDel
11870 PUBLIC Void rgSCHCmnCellDel(cell)
11874 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11875 TRC2(rgSCHCmnCellDel);
11880 if (cellSch == NULLP)
11884 /* Perform the deinit for the UL scheduler */
11885 rgSCHCmnUlCellDeinit(cell);
11887 if(TRUE == cell->emtcEnable)
11889 if (cellSch->apisEmtcUl)
11891 cellSch->apisEmtcUl->rgSCHFreeUlCell(cell);
11895 if (cellSch->apisUl)
11897       /* API pointer checks added (here and below in
11898        * this function). Please check. - antriksh */
11899 cellSch->apisUl->rgSCHFreeUlCell(cell);
11902 /* Perform the deinit for the DL scheduler */
11903 cmLListInit(&cellSch->dl.taLst);
11904 if (cellSch->apisDl)
11906 cellSch->apisDl->rgSCHFreeDlCell(cell);
11909 if (cellSch->apisEmtcDl)
11911 rgSCHEmtcInitTaLst(&cellSch->dl);
11913 cellSch->apisEmtcDl->rgSCHFreeDlCell(cell);
11917 /* DLFS de-initialization */
11918 if (cellSch->dl.isDlFreqSel && cellSch->apisDlfs)
11920 cellSch->apisDlfs->rgSCHDlfsCellDel(cell);
11923 rgSCHPwrCellDel(cell);
11925 rgSCHCmnSpsCellDel(cell);
11928 /* ccpu00117052 - MOD - Passing double pointer
11929 for proper NULLP assignment*/
11930 rgSCHUtlFreeSBuf(cell->instIdx,
11931 (Data**)(&(cell->sc.sch)), (sizeof(RgSchCmnCell)));
11933 } /* rgSCHCmnCellDel */
11937 * @brief This function validates QOS parameters for DL.
11941 * Function: rgSCHCmnValidateDlQos
11942 * Purpose: This function validates QOS parameters for DL.
11944 * Invoked by: Scheduler
11946 * @param[in] CrgLchQosCfg *dlQos
11951 PRIVATE S16 rgSCHCmnValidateDlQos
11953 RgrLchQosCfg *dlQos
11956 PRIVATE S16 rgSCHCmnValidateDlQos(dlQos)
11957 RgrLchQosCfg *dlQos;
11960 U8 qci = dlQos->qci;
11962 TRC2(rgSCHCmnValidateDlQos);
11964 if ( qci < RG_SCH_CMN_MIN_QCI || qci > RG_SCH_CMN_MAX_QCI )
11969 if ((qci >= RG_SCH_CMN_GBR_QCI_START) &&
11970 (qci <= RG_SCH_CMN_GBR_QCI_END))
11972 if ((dlQos->mbr == 0) || (dlQos->mbr < dlQos->gbr))
11981 * @brief Scheduler invocation on logical channel addition.
11985 * Function : rgSCHCmnRgrLchCfg
11987 *     This function does the required processing when a new
11988 *     (dedicated) logical channel is added. Assumes the lcg
11989 *     pointer in ulLc is set.
11991 * @param[in] RgSchCellCb *cell
11992 * @param[in] RgSchUeCb *ue
11993 * @param[in] RgSchDlLcCb *dlLc
11994 * @param[int] RgrLchCfg *lcCfg
11995 * @param[out] RgSchErrInfo *err
12001 PUBLIC S16 rgSCHCmnRgrLchCfg
12010 PUBLIC S16 rgSCHCmnRgrLchCfg(cell, ue, dlLc, lcCfg, err)
12020 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12022 TRC2(rgSCHCmnRgrLchCfg);
12024 ret = rgSCHUtlAllocSBuf(cell->instIdx,
12025 (Data**)&((dlLc)->sch), (sizeof(RgSchCmnDlSvc)));
12028 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnRgrLchCfg(): "
12029 "SCH struct alloc failed for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12030 err->errCause = RGSCHERR_SCH_CFG;
12033 if(lcCfg->lcType != CM_LTE_LCH_DCCH)
12035 ret = rgSCHCmnValidateDlQos(&lcCfg->dlInfo.dlQos);
12038 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"rgSchCmnCrgLcCfg(): "
12039 "DlQos validation failed for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12040 err->errCause = RGSCHERR_SCH_CFG;
12043 /* Perform DL service activation in the scheduler */
12044 ((RgSchCmnDlSvc *)(dlLc->sch))->qci = lcCfg->dlInfo.dlQos.qci;
12045 ((RgSchCmnDlSvc *)(dlLc->sch))->prio = rgSchCmnDlQciPrio[lcCfg->dlInfo.dlQos.qci - 1];
12046 ((RgSchCmnDlSvc *)(dlLc->sch))->gbr = (lcCfg->dlInfo.dlQos.gbr * \
12047 RG_SCH_CMN_REFRESH_TIME)/100;
12048 ((RgSchCmnDlSvc *)(dlLc->sch))->mbr = (lcCfg->dlInfo.dlQos.mbr * \
12049 RG_SCH_CMN_REFRESH_TIME)/100;
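      /* Illustrative example (the units are an assumption): the configured
       * rates appear to be scaled from a 100 ms reference window to the
       * scheduler refresh period. For instance, with a configured gbr of 1000
       * and RG_SCH_CMN_REFRESH_TIME = 300, the stored value would be
       * (1000 * 300) / 100 = 3000 per refresh period. */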
12053 /*assigning highest priority to DCCH */
12054 ((RgSchCmnDlSvc *)(dlLc->sch))->prio=RG_SCH_CMN_DCCH_PRIO;
12057 dlLc->lcType=lcCfg->lcType;
12060 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12062 ret = cellSch->apisEmtcDl->rgSCHRgrDlLcCfg(cell, ue,dlLc ,lcCfg, err);
12071 ret = cellSch->apisDl->rgSCHRgrDlLcCfg(cell, ue, dlLc, lcCfg, err);
12079 if(TRUE == ue->isEmtcUe)
12081 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcCfg(cell, ue, lcCfg, err);
12090 ret = cellSch->apisUl->rgSCHRgrUlLcCfg(cell, ue, lcCfg, err);
12100 rgSCHSCellDlLcCfg(cell, ue, dlLc);
12106 if(lcCfg->dlInfo.dlSpsCfg.isSpsEnabled)
12108 /* Invoke SPS module if SPS is enabled for the service */
12109 ret = rgSCHCmnSpsDlLcCfg(cell, ue, dlLc, lcCfg, err);
12112 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "rgSchCmnRgrLchCfg(): "
12113 "SPS configuration failed for DL LC for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12114 err->errCause = RGSCHERR_SCH_CFG;
12124 * @brief Scheduler invocation on logical channel reconfiguration.
12128 * Function : rgSCHCmnRgrLchRecfg
12130 *     This function does the required processing when an existing
12131 *     (dedicated) logical channel is reconfigured. Assumes the lcg
12132 *     pointer in ulLc is set to the old value.
12133 * Independent of whether new LCG is meant to be configured,
12134 * the new LCG scheduler information is accessed and possibly modified.
12136 * @param[in] RgSchCellCb *cell
12137 * @param[in] RgSchUeCb *ue
12138 * @param[in] RgSchDlLcCb *dlLc
12139 * @param[int] RgrLchRecfg *lcRecfg
12140 * @param[out] RgSchErrInfo *err
12146 PUBLIC S16 rgSCHCmnRgrLchRecfg
12151 RgrLchRecfg *lcRecfg,
12155 PUBLIC S16 rgSCHCmnRgrLchRecfg(cell, ue, dlLc, lcRecfg, err)
12159 RgrLchRecfg *lcRecfg;
12164 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12166 TRC2(rgSCHCmnRgrLchRecfg)
12168 if(dlLc->lcType != CM_LTE_LCH_DCCH)
12170 ret = rgSCHCmnValidateDlQos(&lcRecfg->dlRecfg.dlQos);
12174 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
12175 "DlQos validation failed for CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12176 err->errCause = RGSCHERR_SCH_CFG;
12179 if (((RgSchCmnDlSvc *)(dlLc->sch))->qci != lcRecfg->dlRecfg.dlQos.qci)
12181 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Qci, hence lc Priority change "
12182 "not supported for CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12183 err->errCause = RGSCHERR_SCH_CFG;
12186 ((RgSchCmnDlSvc *)(dlLc->sch))->gbr = (lcRecfg->dlRecfg.dlQos.gbr * \
12187 RG_SCH_CMN_REFRESH_TIME)/100;
12188 ((RgSchCmnDlSvc *)(dlLc->sch))->mbr = (lcRecfg->dlRecfg.dlQos.mbr * \
12189 RG_SCH_CMN_REFRESH_TIME)/100;
12193 /*assigning highest priority to DCCH */
12194 ((RgSchCmnDlSvc *)(dlLc->sch))->prio = RG_SCH_CMN_DCCH_PRIO;
12198 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12200 ret = cellSch->apisEmtcDl->rgSCHRgrDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12205 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcRecfg(cell, ue, lcRecfg, err);
12214 ret = cellSch->apisDl->rgSCHRgrDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12219 ret = cellSch->apisUl->rgSCHRgrUlLcRecfg(cell, ue, lcRecfg, err);
12227 if (lcRecfg->recfgTypes & RGR_DL_LC_SPS_RECFG)
12229 /* Invoke SPS module if SPS is enabled for the service */
12230 if(lcRecfg->dlRecfg.dlSpsRecfg.isSpsEnabled)
12232 ret = rgSCHCmnSpsDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12235 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"SPS re-configuration not "
12236 "supported for dlLC Ignore this CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12247 * @brief Scheduler invocation on logical channel group configuration.
12251 * Function : rgSCHCmnRgrLcgCfg
12253 *     This function does the required processing when a new
12254 *     (dedicated) logical channel group is configured. Assumes the lcg
12255 *     pointer in ulLc is set.
12257 * @param[in] RgSchCellCb *cell,
12258 * @param[in] RgSchUeCb *ue,
12259 * @param[in] RgSchLcgCb *lcg,
12260 * @param[in] RgrLcgCfg *lcgCfg,
12261 * @param[out] RgSchErrInfo *err
12267 PUBLIC S16 rgSCHCmnRgrLcgCfg
12276 PUBLIC S16 rgSCHCmnRgrLcgCfg(cell, ue, lcg, lcgCfg, err)
12285 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12286 RgSchCmnLcg *ulLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCfg->ulInfo.lcgId].sch));
12288 TRC2(rgSCHCmnRgrLcgCfg);
12290 ulLcg->cfgdGbr = (lcgCfg->ulInfo.gbr * RG_SCH_CMN_REFRESH_TIME)/100;
12291 ulLcg->effGbr = ulLcg->cfgdGbr;
12292 ulLcg->deltaMbr = ((lcgCfg->ulInfo.mbr - lcgCfg->ulInfo.gbr) * RG_SCH_CMN_REFRESH_TIME)/100;
12293 ulLcg->effDeltaMbr = ulLcg->deltaMbr;
12296 if(TRUE == ue->isEmtcUe)
12298 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcgCfg(cell, ue, lcg, lcgCfg, err);
12307 ret = cellSch->apisUl->rgSCHRgrUlLcgCfg(cell, ue, lcg, lcgCfg, err);
12313 if (RGSCH_IS_GBR_BEARER(ulLcg->cfgdGbr))
12315 /* Indicate MAC that this LCG is GBR LCG */
12316 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, lcgCfg->ulInfo.lcgId, TRUE);
12322 * @brief Scheduler invocation on logical channel group reconfiguration.
12326 * Function : rgSCHCmnRgrLcgRecfg
12328 *     This function does the required processing when an existing
12329 *     (dedicated) logical channel group is reconfigured. Assumes the lcg
12330 *     pointer in ulLc is set.
12332 * @param[in] RgSchCellCb *cell,
12333 * @param[in] RgSchUeCb *ue,
12334 * @param[in] RgSchLcgCb *lcg,
12335 * @param[in] RgrLcgRecfg *reCfg,
12336 * @param[out] RgSchErrInfo *err
12342 PUBLIC S16 rgSCHCmnRgrLcgRecfg
12347 RgrLcgRecfg *reCfg,
12351 PUBLIC S16 rgSCHCmnRgrLcgRecfg(cell, ue, lcg, reCfg, err)
12355 RgrLcgRecfg *reCfg;
12360 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12361 RgSchCmnLcg *ulLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[reCfg->ulRecfg.lcgId].sch));
12363 TRC2(rgSCHCmnRgrLcgRecfg);
12365 ulLcg->cfgdGbr = (reCfg->ulRecfg.gbr * RG_SCH_CMN_REFRESH_TIME)/100;
12366 ulLcg->effGbr = ulLcg->cfgdGbr;
12367 ulLcg->deltaMbr = ((reCfg->ulRecfg.mbr - reCfg->ulRecfg.gbr) * RG_SCH_CMN_REFRESH_TIME)/100;
12368 ulLcg->effDeltaMbr = ulLcg->deltaMbr;
12371 if(TRUE == ue->isEmtcUe)
12373 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcgRecfg(cell, ue, lcg, reCfg, err);
12382 ret = cellSch->apisUl->rgSCHRgrUlLcgRecfg(cell, ue, lcg, reCfg, err);
12388 if (RGSCH_IS_GBR_BEARER(ulLcg->cfgdGbr))
12390 /* Indicate MAC that this LCG is GBR LCG */
12391 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, reCfg->ulRecfg.lcgId, TRUE);
12395 /* In case of RAB modification */
12396 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, reCfg->ulRecfg.lcgId, FALSE);
12401 /***********************************************************
12403 * Func : rgSCHCmnRgrLchDel
12405 * Desc : Scheduler handling for a (dedicated)
12406 * uplink logical channel being deleted.
12413 **********************************************************/
12415 PUBLIC S16 rgSCHCmnRgrLchDel
12423 PUBLIC S16 rgSCHCmnRgrLchDel(cell, ue, lcId, lcgId)
12430 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12431 TRC2(rgSCHCmnRgrLchDel);
12433 if(TRUE == ue->isEmtcUe)
12435 cellSch->apisEmtcUl->rgSCHRgrUlLchDel(cell, ue, lcId, lcgId);
12440 cellSch->apisUl->rgSCHRgrUlLchDel(cell, ue, lcId, lcgId);
12445 /***********************************************************
12447 * Func : rgSCHCmnLcgDel
12449 * Desc : Scheduler handling for a (dedicated)
12450 *        uplink logical channel group being deleted.
12458 **********************************************************/
12460 PUBLIC Void rgSCHCmnLcgDel
12467 PUBLIC Void rgSCHCmnLcgDel(cell, ue, lcg)
12473 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12474 RgSchCmnLcg *lcgCmn = RG_SCH_CMN_GET_UL_LCG(lcg);
12475 TRC2(rgSCHCmnLcgDel);
12477 if (lcgCmn == NULLP)
12482 if (RGSCH_IS_GBR_BEARER(lcgCmn->cfgdGbr))
12484 /* Indicate MAC that this LCG is GBR LCG */
12485 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, lcg->lcgId, FALSE);
12489 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
12491 rgSCHCmnSpsUlLcgDel(cell, ue, lcg);
12493 #endif /* LTEMAC_SPS */
12495 lcgCmn->effGbr = 0;
12496 lcgCmn->reportedBs = 0;
12497 lcgCmn->cfgdGbr = 0;
12498 /* set lcg bs to 0. Deletion of control block happens
12499 * at the time of UE deletion. */
12502 if(TRUE == ue->isEmtcUe)
12504 cellSch->apisEmtcUl->rgSCHFreeUlLcg(cell, ue, lcg);
12509 cellSch->apisUl->rgSCHFreeUlLcg(cell, ue, lcg);
12516 * @brief This function deletes a service from scheduler.
12520 * Function: rgSCHCmnFreeDlLc
12521 *     Purpose: This function is made available through a function pointer for
12522 *     making the scheduler aware of a service being deleted from the UE.
12524 * Invoked by: BO and Scheduler
12526 * @param[in] RgSchCellCb* cell
12527 * @param[in] RgSchUeCb* ue
12528 * @param[in] RgSchDlLcCb* svc
12533 PUBLIC Void rgSCHCmnFreeDlLc
12540 PUBLIC Void rgSCHCmnFreeDlLc(cell, ue, svc)
12546 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12547 TRC2(rgSCHCmnFreeDlLc);
12548 if (svc->sch == NULLP)
12553 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12555 cellSch->apisEmtcDl->rgSCHFreeDlLc(cell, ue, svc);
12560 cellSch->apisDl->rgSCHFreeDlLc(cell, ue, svc);
12566 rgSCHSCellDlLcDel(cell, ue, svc);
12571 /* If SPS service, invoke SPS module */
12572 if (svc->dlLcSpsCfg.isSpsEnabled)
12574 rgSCHCmnSpsDlLcDel(cell, ue, svc);
12578 /* ccpu00117052 - MOD - Passing double pointer
12579 for proper NULLP assignment*/
12580 rgSCHUtlFreeSBuf(cell->instIdx,
12581 (Data**)(&(svc->sch)), (sizeof(RgSchCmnDlSvc)));
12584 rgSCHLaaDeInitDlLchCb(cell, svc);
12593 * @brief This function Processes the Final Allocations
12594 * made by the RB Allocator against the requested
12595 * CCCH SDURetx Allocations.
12599 * Function: rgSCHCmnDlCcchSduRetxFnlz
12600 * Purpose: This function Processes the Final Allocations
12601 * made by the RB Allocator against the requested
12602 * CCCH Retx Allocations.
12603 * Scans through the scheduled list of ccchSdu retrans
12604 * fills the corresponding pdcch, adds the hqProc to
12605 * the corresponding SubFrm and removes the hqP from
12608 * Invoked by: Common Scheduler
12610 * @param[in] RgSchCellCb *cell
12611 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12616 PRIVATE Void rgSCHCmnDlCcchSduRetxFnlz
12619 RgSchCmnDlRbAllocInfo *allocInfo
12622 PRIVATE Void rgSCHCmnDlCcchSduRetxFnlz(cell, allocInfo)
12624 RgSchCmnDlRbAllocInfo *allocInfo;
12628 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
12629 RgSchDlRbAlloc *rbAllocInfo;
12630 RgSchDlHqProcCb *hqP;
12632 TRC2(rgSCHCmnDlCcchSduRetxFnlz);
12634 /* Traverse through the Scheduled Retx List */
12635 node = allocInfo->ccchSduAlloc.schdCcchSduRetxLst.first;
12638 hqP = (RgSchDlHqProcCb *)(node->node);
12640 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, cell);
12642 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12644 /* Remove the HqP from cell's ccchSduRetxLst */
12645 cmLListDelFrm(&cmnCellDl->ccchSduRetxLst, &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
12646 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
12648 /* Fix: syed dlAllocCb reset should be performed.
12649 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12650 rgSCHCmnDlUeResetTemp(ue, hqP);
12652 /* Fix: syed dlAllocCb reset should be performed.
12653 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12654 node = allocInfo->ccchSduAlloc.nonSchdCcchSduRetxLst.first;
12657 hqP = (RgSchDlHqProcCb *)(node->node);
12660 /* reset the UE allocation Information */
12661 rgSCHCmnDlUeResetTemp(ue, hqP);
12667 * @brief This function Processes the Final Allocations
12668 * made by the RB Allocator against the requested
12669 * CCCH Retx Allocations.
12673 * Function: rgSCHCmnDlCcchRetxFnlz
12674 * Purpose: This function Processes the Final Allocations
12675 * made by the RB Allocator against the requested
12676 * CCCH Retx Allocations.
12677 * Scans through the scheduled list of msg4 retrans
12678 * fills the corresponding pdcch, adds the hqProc to
12679 * the corresponding SubFrm and removes the hqP from
12682 * Invoked by: Common Scheduler
12684 * @param[in] RgSchCellCb *cell
12685 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12690 PRIVATE Void rgSCHCmnDlCcchRetxFnlz
12693 RgSchCmnDlRbAllocInfo *allocInfo
12696 PRIVATE Void rgSCHCmnDlCcchRetxFnlz(cell, allocInfo)
12698 RgSchCmnDlRbAllocInfo *allocInfo;
12702 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
12703 RgSchDlRbAlloc *rbAllocInfo;
12704 RgSchDlHqProcCb *hqP;
12706 TRC2(rgSCHCmnDlCcchRetxFnlz);
12708 /* Traverse through the Scheduled Retx List */
12709 node = allocInfo->msg4Alloc.schdMsg4RetxLst.first;
12712 hqP = (RgSchDlHqProcCb *)(node->node);
12713 raCb = hqP->hqE->raCb;
12714 rbAllocInfo = &raCb->rbAllocInfo;
12716 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12718 /* Remove the HqP from cell's msg4RetxLst */
12719 cmLListDelFrm(&cmnCellDl->msg4RetxLst, &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
12720 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
12721 /* Fix: syed dlAllocCb reset should be performed.
12722 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12723 cmMemset((U8 *)rbAllocInfo, (U8)0, sizeof(*rbAllocInfo));
12724 rgSCHCmnDlHqPResetTemp(hqP);
12726 /* Fix: syed dlAllocCb reset should be performed.
12727 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12728 node = allocInfo->msg4Alloc.nonSchdMsg4RetxLst.first;
12731 hqP = (RgSchDlHqProcCb *)(node->node);
12732 raCb = hqP->hqE->raCb;
12734 cmMemset((U8 *)&raCb->rbAllocInfo, (U8)0, sizeof(raCb->rbAllocInfo));
12735 rgSCHCmnDlHqPResetTemp(hqP);
12742 * @brief This function Processes the Final Allocations
12743 * made by the RB Allocator against the requested
12744 * CCCH SDU tx Allocations.
12748 * Function: rgSCHCmnDlCcchSduTxFnlz
12749 * Purpose: This function Processes the Final Allocations
12750 * made by the RB Allocator against the requested
12751 * CCCH tx Allocations.
12752 * Scans through the scheduled list of CCCH SDU trans
12753 * fills the corresponding pdcch, adds the hqProc to
12754 * the corresponding SubFrm and removes the hqP from
12757 * Invoked by: Common Scheduler
12759 * @param[in] RgSchCellCb *cell
12760 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12765 PRIVATE Void rgSCHCmnDlCcchSduTxFnlz
12768 RgSchCmnDlRbAllocInfo *allocInfo
12771 PRIVATE Void rgSCHCmnDlCcchSduTxFnlz(cell, allocInfo)
12773 RgSchCmnDlRbAllocInfo *allocInfo;
12778 RgSchDlRbAlloc *rbAllocInfo;
12779 RgSchDlHqProcCb *hqP;
12780 RgSchLchAllocInfo lchSchdData;
12781 TRC2(rgSCHCmnDlCcchSduTxFnlz);
12783 /* Traverse through the Scheduled Retx List */
12784 node = allocInfo->ccchSduAlloc.schdCcchSduTxLst.first;
12787 hqP = (RgSchDlHqProcCb *)(node->node);
12788 ueCb = hqP->hqE->ue;
12790 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
12792 /* fill the pdcch and HqProc */
12793 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12795 /* Remove the raCb from cell's toBeSchdLst */
12796 cmLListDelFrm(&cell->ccchSduUeLst, &ueCb->ccchSduLnk);
12797 ueCb->ccchSduLnk.node = (PTR)NULLP;
12799       /* Fix: Resetting this is required to avoid complication
12800        * in the reestablishment case */
12801 ueCb->dlCcchInfo.bo = 0;
12803 /* Indicate DHM of the CCCH LC scheduling */
12804 hqP->tbInfo[0].contResCe = NOTPRSNT;
12805 lchSchdData.lcId = 0;
12806 lchSchdData.schdData = hqP->tbInfo[0].ccchSchdInfo.totBytes -
12807 (RGSCH_MSG4_HDRSIZE);
12808 rgSCHDhmAddLcData(cell->instIdx, &lchSchdData, &hqP->tbInfo[0]);
12810 /* Fix: syed dlAllocCb reset should be performed.
12811 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12812 rgSCHCmnDlUeResetTemp(ueCb, hqP);
12814 /* Fix: syed dlAllocCb reset should be performed.
12815 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12816 node = allocInfo->ccchSduAlloc.nonSchdCcchSduTxLst.first;
12819 hqP = (RgSchDlHqProcCb *)(node->node);
12820 ueCb = hqP->hqE->ue;
12822 /* Release HqProc */
12823 rgSCHDhmRlsHqpTb(hqP, 0, FALSE);
12824       /* Fix: Removed release of TB1 as it does not exist for a CCCH SDU and hence caused a crash */
12825 /*rgSCHDhmRlsHqpTb(hqP, 1, FALSE);*/
12826 /* reset the UE allocation Information */
12827 rgSCHCmnDlUeResetTemp(ueCb, hqP);
12834 * @brief This function Processes the Final Allocations
12835 * made by the RB Allocator against the requested
12836 * CCCH tx Allocations.
12840 * Function: rgSCHCmnDlCcchTxFnlz
12841 * Purpose: This function Processes the Final Allocations
12842 * made by the RB Allocator against the requested
12843 * CCCH tx Allocations.
12844 * Scans through the scheduled list of msg4 trans
12845 * fills the corresponding pdcch, adds the hqProc to
12846 * the corresponding SubFrm and removes the hqP from
12849 * Invoked by: Common Scheduler
12851 * @param[in] RgSchCellCb *cell
12852 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12857 PRIVATE Void rgSCHCmnDlCcchTxFnlz
12860 RgSchCmnDlRbAllocInfo *allocInfo
12863 PRIVATE Void rgSCHCmnDlCcchTxFnlz(cell, allocInfo)
12865 RgSchCmnDlRbAllocInfo *allocInfo;
12870 RgSchDlRbAlloc *rbAllocInfo;
12871 RgSchDlHqProcCb *hqP;
12872 RgSchLchAllocInfo lchSchdData;
12873 TRC2(rgSCHCmnDlCcchTxFnlz);
12875 /* Traverse through the Scheduled Retx List */
12876 node = allocInfo->msg4Alloc.schdMsg4TxLst.first;
12879 hqP = (RgSchDlHqProcCb *)(node->node);
12880 raCb = hqP->hqE->raCb;
12882 rbAllocInfo = &raCb->rbAllocInfo;
12884 /* fill the pdcch and HqProc */
12885 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12886 /* MSG4 Fix Start */
12888 rgSCHRamRmvFrmRaInfoSchdLst(cell, raCb);
12891 /* Indicate DHM of the CCCH LC scheduling */
12892 lchSchdData.lcId = 0;
12893 lchSchdData.schdData = hqP->tbInfo[0].ccchSchdInfo.totBytes -
12894 (RGSCH_MSG4_HDRSIZE + RGSCH_CONT_RESID_SIZE);
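      /* Note (interpretation is an assumption): RGSCH_MSG4_HDRSIZE is taken
       * here to account for the MAC sub-header overhead and
       * RGSCH_CONT_RESID_SIZE for the 6-byte UE Contention Resolution Identity
       * CE, so only the remaining bytes are reported to DHM as CCCH payload. */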
12895       /* Transmitting the presence of the Contention Resolution CE across the MAC-SCH interface to
12896 * identify CCCH SDU transmissions which need to be done
12898 * contention resolution CE*/
12899 hqP->tbInfo[0].contResCe = PRSNT_NODEF;
12900       /* Don't add the LC if only the Contention Resolution CE is being transmitted */
12901 if(raCb->dlCcchInfo.bo)
12903 rgSCHDhmAddLcData(cell->instIdx, &lchSchdData, &hqP->tbInfo[0]);
12908 /* Fix: syed dlAllocCb reset should be performed.
12909 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12910 cmMemset((U8 *)&raCb->rbAllocInfo, (U8)0, sizeof(raCb->rbAllocInfo));
12911 rgSCHCmnDlHqPResetTemp(hqP);
12913 node = allocInfo->msg4Alloc.nonSchdMsg4TxLst.first;
12916 hqP = (RgSchDlHqProcCb *)(node->node);
12917 raCb = hqP->hqE->raCb;
12919 rbAllocInfo = &raCb->rbAllocInfo;
12920 /* Release HqProc */
12921 rgSCHDhmRlsHqpTb(hqP, 0, FALSE);
12922       /* Fix: Removed release of TB1 as it does not exist for MSG4 and hence caused a crash */
12923 /* rgSCHDhmRlsHqpTb(hqP, 1, FALSE);*/
12924 /* reset the UE allocation Information */
12925 cmMemset((U8 *)rbAllocInfo, (U8)0, sizeof(*rbAllocInfo));
12926 rgSCHCmnDlHqPResetTemp(hqP);
12933 * @brief This function calculates the BI Index to be sent in the Bi header
12937 * Function: rgSCHCmnGetBiIndex
12938 *     Purpose: This function uses the previously calculated BI value and the
12939 *     difference between the time the last BI was sent and the current time
12940 *     to calculate the latest BI index. It also considers how many UEs are
12941 *     left unserved in this subframe.
12943 * Invoked by: Common Scheduler
12945 * @param[in] RgSchCellCb *cell
12946 * @param[in] U32 ueCount
12951 PUBLIC U8 rgSCHCmnGetBiIndex
12957 PUBLIC U8 rgSCHCmnGetBiIndex(cell, ueCount)
12962 S16 prevVal = 0; /* To Store Intermediate Value */
12963 U16 newBiVal = 0; /* To store Bi Value in millisecond */
12967 TRC2(rgSCHCmnGetBiIndex)
12969 if (cell->biInfo.prevBiTime != 0)
12972 if(cell->emtcEnable == TRUE)
12974 timeDiff =(RGSCH_CALC_SF_DIFF_EMTC(cell->crntTime, cell->biInfo.biTime));
12979 timeDiff =(RGSCH_CALC_SF_DIFF(cell->crntTime, cell->biInfo.biTime));
12982 prevVal = cell->biInfo.prevBiTime - timeDiff;
12988 newBiVal = RG_SCH_CMN_GET_BI_VAL(prevVal,ueCount);
12989 /* To be used next time when BI is calculated */
12991 if(cell->emtcEnable == TRUE)
12993 RGSCHCPYTIMEINFO_EMTC(cell->crntTime, cell->biInfo.biTime)
12998 RGSCHCPYTIMEINFO(cell->crntTime, cell->biInfo.biTime)
13001 /* Search the actual BI Index from table Backoff Parameters Value and
13002 * return that Index */
13005 if (rgSchCmnBiTbl[idx] > newBiVal)
13010 }while(idx < RG_SCH_CMN_NUM_BI_VAL-1);
13011 cell->biInfo.prevBiTime = rgSchCmnBiTbl[idx];
13012    /* Of the 16 entries in Table 7.2-1 of 36.321 v8.8.0, 3 are reserved, so 13 entries in total */
13013    RETVALUE(idx); /* If a reserved value from the table is returned, the UE treats it as 960 ms */
13014 } /* rgSCHCmnGetBiIndex */
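/* Illustrative sketch (hypothetical helper, not referenced anywhere in this
 * file): picking the backoff index amounts to a linear scan for the first
 * entry of a sorted backoff table that exceeds the computed backoff value,
 * which is what the do-while loop above does against rgSchCmnBiTbl[]. */
static U8 rgSCHCmnPickBiIdx(const U16 *biTbl, U8 tblSz, U16 biVal)
{
   U8 idx;

   if (tblSz == 0)
   {
      return (0);
   }
   for (idx = 0; idx < tblSz - 1; idx++)
   {
      if (biTbl[idx] > biVal)
      {
         break; /* first table entry larger than the computed BI value */
      }
   }
   return (idx); /* last index acts as the saturating (maximum) backoff */
}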
13018 * @brief This function Processes the Final Allocations
13019 * made by the RB Allocator against the requested
13020 *        RAR allocations. Assumption: The requested
13021 * allocations are always satisfied completely.
13022 * Hence no roll back.
13026 * Function: rgSCHCmnDlRaRspFnlz
13027 * Purpose: This function Processes the Final Allocations
13028 * made by the RB Allocator against the requested.
13029 * Takes care of PDCCH filling.
13031 * Invoked by: Common Scheduler
13033 * @param[in] RgSchCellCb *cell
13034 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
13039 PRIVATE Void rgSCHCmnDlRaRspFnlz
13042 RgSchCmnDlRbAllocInfo *allocInfo
13045 PRIVATE Void rgSCHCmnDlRaRspFnlz(cell, allocInfo)
13047 RgSchCmnDlRbAllocInfo *allocInfo;
13051 RgSchDlRbAlloc *raRspAlloc;
13052 RgSchDlSf *subFrm = NULLP;
13056 RgSchRaReqInfo *raReq;
13058 RgSchUlAlloc *ulAllocRef=NULLP;
13059 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
13060 U8 allocRapidCnt = 0;
13062 U32 msg3SchdIdx = 0;
13063 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
13067 TRC2(rgSCHCmnDlRaRspFnlz);
13069 for (rarCnt=0; rarCnt<RG_SCH_CMN_MAX_CMN_PDCCH; rarCnt++)
13071 raRspAlloc = &allocInfo->raRspAlloc[rarCnt];
13072 /* Having likely condition first for optimization */
13073 if (!raRspAlloc->pdcch)
13079 subFrm = raRspAlloc->dlSf;
13080 reqLst = &cell->raInfo.raReqLst[raRspAlloc->raIndex];
13081 /* Corrected RACH handling for multiple RAPIDs per RARNTI */
13082 allocRapidCnt = raRspAlloc->numRapids;
13083 while (allocRapidCnt)
13085 raReq = (RgSchRaReqInfo *)(reqLst->first->node);
13086 /* RACHO: If dedicated preamble, then allocate UL Grant
13087 * (consequence of handover/pdcchOrder) and continue */
13088 if (RGSCH_IS_DEDPRM(cell, raReq->raReq.rapId))
13090 rgSCHCmnHdlHoPo(cell, &subFrm->raRsp[rarCnt].contFreeUeLst,
13092 cmLListDelFrm(reqLst, reqLst->first);
13094 /* ccpu00117052 - MOD - Passing double pointer
13095 for proper NULLP assignment*/
13096 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13097 sizeof(RgSchRaReqInfo));
13101 if(cell->overLoadBackOffEnab)
13102       {/* RACH overload control is triggered, skipping this RACH request */
13103 cmLListDelFrm(reqLst, reqLst->first);
13105 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13106 sizeof(RgSchRaReqInfo));
13109 /* Attempt to include each RA request into the RSP */
13110 /* Any failure in the procedure is considered to */
13111       /* affect further allocations in the same TTI. When  */
13112 /* a failure happens, we break out and complete */
13113 /* the processing for random access */
13114 if (rgSCHRamCreateRaCb(cell, &raCb, &err) != ROK)
13118 /* Msg3 allocation request to USM */
13119 if (raReq->raReq.rapId < cell->rachCfg.sizeRaPreambleGrpA)
13123 /*ccpu00128820 - MOD - Msg3 alloc double delete issue*/
13124 rgSCHCmnMsg3GrntReq(cell, raCb->tmpCrnti, preamGrpA, \
13125 &(raCb->msg3HqProc), &ulAllocRef, &raCb->msg3HqProcId);
13126 if (ulAllocRef == NULLP)
13128 rgSCHRamDelRaCb(cell, raCb, TRUE);
13131 if (raReq->raReq.cqiPres)
13133 raCb->ccchCqi = raReq->raReq.cqiIdx;
13137 raCb->ccchCqi = cellDl->ccchCqi;
13139 raCb->rapId = raReq->raReq.rapId;
13140 raCb->ta.pres = TRUE;
13141 raCb->ta.val = raReq->raReq.ta;
13142 raCb->msg3Grnt = ulAllocRef->grnt;
13143 /* Populating the tpc value received */
13144 raCb->msg3Grnt.tpc = raReq->raReq.tpc;
13145 /* PHR handling for MSG3 */
13146 ulAllocRef->raCb = raCb;
13148 /* To the crntTime, add the MIN time at which UE will
13149 * actually send MSG3 i.e DL_DELTA+6 */
13150 raCb->msg3AllocTime = cell->crntTime;
13151 RGSCH_INCR_SUB_FRAME(raCb->msg3AllocTime, RG_SCH_CMN_MIN_MSG3_RECP_INTRVL);
13153 msg3SchdIdx = (cell->crntTime.slot+RG_SCH_CMN_DL_DELTA) %
13154 RGSCH_NUM_SUB_FRAMES;
13155 /*[ccpu00134666]-MOD-Modify the check to schedule the RAR in
13156 special subframe */
13157 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][msg3SchdIdx] !=
13158 RG_SCH_TDD_UL_SUBFRAME)
13160 RGSCHCMNADDTOCRNTTIME(cell->crntTime,raCb->msg3AllocTime,
13161 RG_SCH_CMN_DL_DELTA)
13162 msg3Subfrm = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][
13163 raCb->msg3AllocTime.slot];
13164 RGSCHCMNADDTOCRNTTIME(raCb->msg3AllocTime, raCb->msg3AllocTime,
13168 cmLListAdd2Tail(&subFrm->raRsp[rarCnt].raRspLst, &raCb->rspLnk);
13169 raCb->rspLnk.node = (PTR)raCb;
13170 cmLListDelFrm(reqLst, reqLst->first);
13172 /* ccpu00117052 - MOD - Passing double pointer
13173 for proper NULLP assignment*/
13174 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13175 sizeof(RgSchRaReqInfo));
13177 /* SR_RACH_STATS : RAR scheduled */
13182 /* Fill subframe data members */
13183 subFrm->raRsp[rarCnt].raRnti = raRspAlloc->rnti;
13184 subFrm->raRsp[rarCnt].pdcch = raRspAlloc->pdcch;
13185 subFrm->raRsp[rarCnt].tbSz = raRspAlloc->tbInfo[0].bytesAlloc;
13186 /* Fill PDCCH data members */
13187 rgSCHCmnFillPdcch(cell, subFrm->raRsp[rarCnt].pdcch, raRspAlloc);
13190 if(cell->overLoadBackOffEnab)
13191       {/* RACH overload control is triggered, skipping this RACH request */
13192 subFrm->raRsp[rarCnt].backOffInd.pres = PRSNT_NODEF;
13193 subFrm->raRsp[rarCnt].backOffInd.val = cell->overLoadBackOffval;
13198 subFrm->raRsp[rarCnt].backOffInd.pres = NOTPRSNT;
13201       /*[ccpu00125212] Avoid sending an empty RAR in case the RAR window
13202         is short and the UE is sending an unauthorised preamble.*/
13203 reqLst = &cell->raInfo.raReqLst[raRspAlloc->raIndex];
13204 if ((raRspAlloc->biEstmt) && (reqLst->count))
13206 subFrm->raRsp[0].backOffInd.pres = PRSNT_NODEF;
13207 /* Added as part of Upgrade */
13208 subFrm->raRsp[0].backOffInd.val =
13209 rgSCHCmnGetBiIndex(cell, reqLst->count);
13211 /* SR_RACH_STATS : Back Off Inds */
13215 else if ((subFrm->raRsp[rarCnt].raRspLst.first == NULLP) &&
13216 (subFrm->raRsp[rarCnt].contFreeUeLst.first == NULLP))
13218 /* Return the grabbed PDCCH */
13219 rgSCHUtlPdcchPut(cell, &subFrm->pdcchInfo, raRspAlloc->pdcch);
13220 subFrm->raRsp[rarCnt].pdcch = NULLP;
13221 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnRaRspAlloc(): "
13222 "Not even one RaReq.");
13226 RLOG_ARG3(L_DEBUG,DBG_CELLID,cell->cellId,
13227 "RNTI:%d Scheduled RAR @ (%u,%u) ",
13229 cell->crntTime.sfn,
13230 cell->crntTime.slot);
13236 * @brief This function computes rv.
13240 * Function: rgSCHCmnDlCalcRvForBcch
13241 * Purpose: This function computes rv.
13243 * Invoked by: Common Scheduler
13245 * @param[in] RgSchCellCb *cell
13246 * @param[in] Bool si
13252 PRIVATE U8 rgSCHCmnDlCalcRvForBcch
13259 PRIVATE U8 rgSCHCmnDlCalcRvForBcch(cell, si, i)
13266 CmLteTimingInfo frm;
13267 TRC2(rgSCHCmnDlCalcRvForBcch);
13269 frm = cell->crntTime;
13270 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
13278 k = (frm.sfn/2) % 4;
13280 rv = RGSCH_CEIL(3*k, 2) % 4;
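   /* Worked example: with k = (SFN/2) % 4 and rv = ceil(3k/2) % 4, the SIB1
    * transmissions cycle through the redundancy versions 0, 2, 3, 1 for
    * k = 0, 1, 2, 3 respectively, i.e. across the four SIB1 occasions of its
    * 80 ms window. */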
13285 * @brief This function Processes the Final Allocations
13286 * made by the RB Allocator against the requested
13287 *        BCCH/PCCH allocations. Assumption: The requested
13288 * allocations are always satisfied completely.
13289 * Hence no roll back.
13293 * Function: rgSCHCmnDlBcchPcchFnlz
13294 * Purpose: This function Processes the Final Allocations
13295 * made by the RB Allocator against the requested.
13296 * Takes care of PDCCH filling.
13298 * Invoked by: Common Scheduler
13300 * @param[in] RgSchCellCb *cell
13301 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
13306 PRIVATE Void rgSCHCmnDlBcchPcchFnlz
13309 RgSchCmnDlRbAllocInfo *allocInfo
13312 PRIVATE Void rgSCHCmnDlBcchPcchFnlz(cell, allocInfo)
13314 RgSchCmnDlRbAllocInfo *allocInfo;
13317 RgSchDlRbAlloc *rbAllocInfo;
13321 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_SF_ALLOC_SIZE;
13323 #ifdef LTEMAC_HDFDD
13324 U8 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13326 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13330 /* Moving variables to available scope for optimization */
13331 RgSchClcDlLcCb *pcch;
13334 RgSchClcDlLcCb *bcch;
13337 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
13339 TRC2(rgSCHCmnDlBcchPcchFnlz);
13342 rbAllocInfo = &allocInfo->pcchAlloc;
13343 if (rbAllocInfo->pdcch)
13345 RgInfSfAlloc *subfrmAlloc = &(cell->sfAllocArr[nextSfIdx]);
13347 /* Added sfIdx calculation for TDD as well */
13349 #ifdef LTEMAC_HDFDD
13350 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13352 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13355 subFrm = rbAllocInfo->dlSf;
13356 pcch = rgSCHDbmGetPcch(cell);
13359 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnDlBcchPcchFnlz( ): "
13360 "No Pcch Present");
13364 /* Added Dl TB count for paging message transmission*/
13366 cell->dlUlTbCnt.tbTransDlTotalCnt++;
13368 bo = (RgSchClcBoRpt *)pcch->boLst.first->node;
13369 cmLListDelFrm(&pcch->boLst, &bo->boLstEnt);
13370 /* ccpu00117052 - MOD - Passing double pointer
13371 for proper NULLP assignment*/
13372 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(RgSchClcBoRpt));
13373 /* Fill subframe data members */
13374 subFrm->pcch.tbSize = rbAllocInfo->tbInfo[0].bytesAlloc;
13375 subFrm->pcch.pdcch = rbAllocInfo->pdcch;
13376 /* Fill PDCCH data members */
13377 rgSCHCmnFillPdcch(cell, subFrm->pcch.pdcch, rbAllocInfo);
13378 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, pcch->lcId, TRUE);
13379 /* ccpu00132314-ADD-Update the tx power allocation info
13380 TODO-Need to add a check for max tx power per symbol */
13381 subfrmAlloc->cmnLcInfo.pcchInfo.txPwrOffset = cellDl->pcchTxPwrOffset;
13385 rbAllocInfo = &allocInfo->bcchAlloc;
13386 if (rbAllocInfo->pdcch)
13388 RgInfSfAlloc *subfrmAlloc = &(cell->sfAllocArr[nextSfIdx]);
13390 #ifdef LTEMAC_HDFDD
13391 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13393 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13396 subFrm = rbAllocInfo->dlSf;
13398 /* Fill subframe data members */
13399 subFrm->bcch.tbSize = rbAllocInfo->tbInfo[0].bytesAlloc;
13400 subFrm->bcch.pdcch = rbAllocInfo->pdcch;
13401 /* Fill PDCCH data members */
13402 rgSCHCmnFillPdcch(cell, subFrm->bcch.pdcch, rbAllocInfo);
13404 if(rbAllocInfo->schdFirst)
13407 bcch = rgSCHDbmGetFirstBcchOnDlsch(cell);
13408 bo = (RgSchClcBoRpt *)bcch->boLst.first->node;
13410 /*Copy the SIB1 msg buff into interface buffer */
13411 SCpyMsgMsg(cell->siCb.crntSiInfo.sib1Info.sib1,
13412 rgSchCb[cell->instIdx].rgSchInit.region,
13413 rgSchCb[cell->instIdx].rgSchInit.pool,
13414 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
13415 #endif/*RGR_SI_SCH*/
13416 subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv =
13417 rgSCHCmnDlCalcRvForBcch(cell, FALSE, 0);
13425 i = cell->siCb.siCtx.i;
13426 /*Decrement the retransmission count */
13427 cell->siCb.siCtx.retxCntRem--;
13429 /*Copy the SI msg buff into interface buffer */
13430 if(cell->siCb.siCtx.warningSiFlag == FALSE)
13432 SCpyMsgMsg(cell->siCb.siArray[cell->siCb.siCtx.siId-1].si,
13433 rgSchCb[cell->instIdx].rgSchInit.region,
13434 rgSchCb[cell->instIdx].rgSchInit.pool,
13435 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
13439 pdu = rgSCHUtlGetWarningSiPdu(cell);
13440 RGSCH_NULL_CHECK(cell->instIdx, pdu);
13442 rgSchCb[cell->instIdx].rgSchInit.region,
13443 rgSchCb[cell->instIdx].rgSchInit.pool,
13444 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
13445 if(cell->siCb.siCtx.retxCntRem == 0)
13447 rgSCHUtlFreeWarningSiPdu(cell);
13448 cell->siCb.siCtx.warningSiFlag = FALSE;
13453 bcch = rgSCHDbmGetSecondBcchOnDlsch(cell);
13454 bo = (RgSchClcBoRpt *)bcch->boLst.first->node;
13456 if(bo->retxCnt != cell->siCfg.retxCnt-1)
13461 #endif/*RGR_SI_SCH*/
13462 subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv =
13463 rgSCHCmnDlCalcRvForBcch(cell, TRUE, i);
13466 /* Added Dl TB count for SIB1 and SI messages transmission.
13467 * This counter will be incremented only for the first transmission
13468 * (with RV 0) of these messages*/
13470 if(subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv == 0)
13472 cell->dlUlTbCnt.tbTransDlTotalCnt++;
13476 if(bo->retxCnt == 0)
13478 cmLListDelFrm(&bcch->boLst, &bo->boLstEnt);
13479 /* ccpu00117052 - MOD - Passing double pointer
13480 for proper NULLP assignment*/
13481 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(RgSchClcBoRpt));
13483 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, bcch->lcId, sendInd);
13485 /*Fill the interface info */
13486 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, NULLD, NULLD);
13488 /* ccpu00132314-ADD-Update the tx power allocation info
13489 TODO-Need to add a check for max tx power per symbol */
13490 subfrmAlloc->cmnLcInfo.bcchInfo.txPwrOffset = cellDl->bcchTxPwrOffset;
13492 /* mBuf has already been copied above */
13493 #endif/*RGR_SI_SCH*/
13506 * Function: rgSCHCmnUlSetAllUnSched
13509 * Invoked by: Common Scheduler
13511 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13516 PRIVATE Void rgSCHCmnUlSetAllUnSched
13518 RgSchCmnUlRbAllocInfo *allocInfo
13521 PRIVATE Void rgSCHCmnUlSetAllUnSched(allocInfo)
13522 RgSchCmnUlRbAllocInfo *allocInfo;
13527 TRC2(rgSCHCmnUlSetAllUnSched);
13529 node = allocInfo->contResLst.first;
13532 rgSCHCmnUlMov2NonSchdCntResLst(allocInfo, (RgSchUeCb *)node->node);
13533 node = allocInfo->contResLst.first;
13536 node = allocInfo->retxUeLst.first;
13539 rgSCHCmnUlMov2NonSchdRetxUeLst(allocInfo, (RgSchUeCb *)node->node);
13540 node = allocInfo->retxUeLst.first;
13543 node = allocInfo->ueLst.first;
13546 rgSCHCmnUlMov2NonSchdUeLst(allocInfo, (RgSchUeCb *)node->node);
13547 node = allocInfo->ueLst.first;
13559 * Function: rgSCHCmnUlAdd2CntResLst
13562 * Invoked by: Common Scheduler
13564 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13565 * @param[in] RgSchUeCb *ue
13570 PUBLIC Void rgSCHCmnUlAdd2CntResLst
13572 RgSchCmnUlRbAllocInfo *allocInfo,
13576 PUBLIC Void rgSCHCmnUlAdd2CntResLst(allocInfo, ue)
13577 RgSchCmnUlRbAllocInfo *allocInfo;
13581 RgSchCmnUeUlAlloc *ulAllocInfo = &((RG_SCH_CMN_GET_UL_UE(ue,ue->cell))->alloc);
13582 TRC2(rgSCHCmnUlAdd2CntResLst);
13583 cmLListAdd2Tail(&allocInfo->contResLst, &ulAllocInfo->reqLnk);
13584 ulAllocInfo->reqLnk.node = (PTR)ue;
13593 * Function: rgSCHCmnUlAdd2UeLst
13596 * Invoked by: Common Scheduler
13598 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13599 * @param[in] RgSchUeCb *ue
13604 PUBLIC Void rgSCHCmnUlAdd2UeLst
13607 RgSchCmnUlRbAllocInfo *allocInfo,
13611 PUBLIC Void rgSCHCmnUlAdd2UeLst(cell, allocInfo, ue)
13613 RgSchCmnUlRbAllocInfo *allocInfo;
13617 RgSchCmnUeUlAlloc *ulAllocInfo = &((RG_SCH_CMN_GET_UL_UE(ue,cell))->alloc);
13618 TRC2(rgSCHCmnUlAdd2UeLst);
13619 if (ulAllocInfo->reqLnk.node == NULLP)
13621 cmLListAdd2Tail(&allocInfo->ueLst, &ulAllocInfo->reqLnk);
13622 ulAllocInfo->reqLnk.node = (PTR)ue;
13632 * Function: rgSCHCmnAllocUlRb
13633 * Purpose: To do RB allocations for uplink
13635 * Invoked by: Common Scheduler
13637 * @param[in] RgSchCellCb *cell
13638 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
13642 PUBLIC Void rgSCHCmnAllocUlRb
13645 RgSchCmnUlRbAllocInfo *allocInfo
13648 PUBLIC Void rgSCHCmnAllocUlRb(cell, allocInfo)
13650 RgSchCmnUlRbAllocInfo *allocInfo;
13653 RgSchUlSf *sf = allocInfo->sf;
13654 TRC2(rgSCHCmnAllocUlRb);
13656 /* Schedule for new transmissions */
13657 rgSCHCmnUlRbAllocForLst(cell, sf, allocInfo->ueLst.count,
13658 &allocInfo->ueLst, &allocInfo->schdUeLst,
13659 &allocInfo->nonSchdUeLst, (Bool)TRUE);
13663 /***********************************************************
13665 * Func : rgSCHCmnUlRbAllocForLst
13667 * Desc : Perform UL RB allocation for the list of UEs in the common RB allocation information passed
13676 **********************************************************/
13678 PRIVATE Void rgSCHCmnUlRbAllocForLst
13684 CmLListCp *schdLst,
13685 CmLListCp *nonSchdLst,
13689 PRIVATE Void rgSCHCmnUlRbAllocForLst(cell, sf, count, reqLst, schdLst,
13690 nonSchdLst, isNewTx)
13695 CmLListCp *schdLst;
13696 CmLListCp *nonSchdLst;
13705 CmLteTimingInfo timeInfo;
13708 TRC2(rgSCHCmnUlRbAllocForLst);
13710 if(schdLst->count == 0)
13712 cmLListInit(schdLst);
13715 cmLListInit(nonSchdLst);
13717 if(isNewTx == TRUE)
13719 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.numUes = (U8) count;
13721 RG_SCH_ADD_TO_CRNT_TIME(cell->crntTime, timeInfo, TFU_ULCNTRL_DLDELTA);
13722 k = rgSchTddPuschTxKTbl[cell->ulDlCfgIdx][timeInfo.subframe];
13723 RG_SCH_ADD_TO_CRNT_TIME(timeInfo,
13724 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.timingInfo, k);
13726 RG_SCH_ADD_TO_CRNT_TIME(cell->crntTime,cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.timingInfo,
13727 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
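   /* Illustrative note (macro values are assumptions, not taken from this file):
    * in FDD an UL grant sent in downlink subframe n schedules PUSCH in subframe
    * n+4. If, hypothetically, TFU_ULCNTRL_DLDELTA were 0 and
    * RGSCH_PDCCH_PUSCH_DELTA were 4, a current time of (sfn=100, subframe=8)
    * would be recorded here as a PUSCH timing of (sfn=101, subframe=2). */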
13732 for (lnk = reqLst->first; count; lnk = lnk->next, --count)
13734 RgSchUeCb *ue = (RgSchUeCb *)lnk->node;
13735 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
13740 if ((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
13745 ueUl->subbandShare = ueUl->subbandRequired;
13746 if(isNewTx == TRUE)
13748 maxRb = RGSCH_MIN((ueUl->subbandRequired * MAX_5GTF_VRBG_SIZE), ue->ue5gtfCb.maxPrb);
13750 ret = rgSCHCmnUlRbAllocForUe(cell, sf, ue, maxRb, hole);
13753 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, schdLst);
13754 rgSCHCmnUlUeFillAllocInfo(cell, ue);
13758 gUl5gtfRbAllocFail++;
13759 #if defined (TENB_STATS) && defined (RG_5GTF)
13760 cell->tenbStats->sch.ul5gtfRbAllocFail++;
13762 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, nonSchdLst);
13763 ue->isMsg4PdcchWithCrnti = FALSE;
13764 ue->isSrGrant = FALSE;
13767 if(isNewTx == TRUE)
13769 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.
13770 ulAllocInfo[count - 1].rnti = ue->ueId;
13771 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.
13772 ulAllocInfo[count - 1].numPrb = ue->ul.nPrb;
13775 ueUl->subbandShare = 0; /* This reset will take care of
13776 * all scheduler types */
13778 for (; count; lnk = lnk->next, --count)
13780 RgSchUeCb *ue = (RgSchUeCb *)lnk->node;
13781 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, nonSchdLst);
13782 ue->isMsg4PdcchWithCrnti = FALSE;
13789 /***********************************************************
13791 * Func : rgSCHCmnUlMdfyGrntForCqi
13793 * Desc : Modify UL Grant to consider presence of
13794 * CQI along with PUSCH Data.
13799 * - Scale down iTbs based on betaOffset and
13800 * the size of the ACQI report.
13801 * - Optionally attempt to increase numSb by 1
13802 * if input payload size does not fit in due
13803 * to reduced tbSz as a result of iTbsNew.
13807 **********************************************************/
13809 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi
13821 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi(cell, ue, maxRb, numSb, iTbs, hqSz, stepDownItbs, effTgt)
13832 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(ue->cell);
13837 U32 remREsForPusch;
13840 U32 betaOffVal = ue->ul.betaOffstVal;
13841 U32 cqiRiRptSz = ue->ul.cqiRiSz;
13842 U32 betaOffHqVal = rgSchCmnBetaHqOffstTbl[ue->ul.betaHqOffst];
13843 U32 resNumSb = *numSb;
13844 U32 puschEff = 1000;
13847 Bool mdfyiTbsFlg = FALSE;
13848 U8 resiTbs = *iTbs;
13850 TRC2(rgSCHCmnUlMdfyGrntForCqi)
13855 iMcs = rgSCHCmnUlGetIMcsFrmITbs(resiTbs, RG_SCH_CMN_GET_UE_CTGY(ue));
13856 RG_SCH_UL_MCS_TO_MODODR(iMcs, modOdr);
13857 if (RG_SCH_CMN_GET_UE_CTGY(ue) != CM_LTE_UE_CAT_5)
13859 modOdr = RGSCH_MIN(RGSCH_QM_QPSK, modOdr);
13863 modOdr = RGSCH_MIN(RGSCH_QM_64QAM, modOdr);
13865 nPrb = resNumSb * cellUl->sbSize;
13866 /* Restricting the minimum iTbs required for modification to 10 */
13867 if ((nPrb >= maxRb) && (resiTbs <= 10))
13869 /* Could not accommodate ACQI */
13872 totREs = nPrb * RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl);
13873 tbSz = rgTbSzTbl[0][resiTbs][nPrb-1];
13874 /* totalREs/tbSz = num of bits perRE. */
13875 cqiRiREs = (totREs * betaOffVal * cqiRiRptSz)/(1000 * tbSz); /* betaOffVal is represented
13876 as parts per 1000 */
13877 hqREs = (totREs * betaOffHqVal * hqSz)/(1000 * tbSz);
13878 if ((cqiRiREs + hqREs) < totREs)
13880 remREsForPusch = totREs - cqiRiREs - hqREs;
13881 bitsPerRe = (tbSz * 1000)/remREsForPusch; /* Multiplying by 1000 for integer operation */
13882 puschEff = bitsPerRe/modOdr;
13884 if (puschEff < effTgt)
13886 /* ensure resultant efficiency for PUSCH Data is within 0.93*/
13891 /* Alternate between increasing SB or decreasing iTbs until eff is met */
13892 if (mdfyiTbsFlg == FALSE)
13896 resNumSb = resNumSb + 1;
13898 mdfyiTbsFlg = TRUE;
13904 resiTbs-= stepDownItbs;
13906 mdfyiTbsFlg = FALSE;
13909 }while (1); /* Loop breaks if efficiency is met
13910 or returns RFAILED if not able to meet the efficiency */
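   /* Illustrative worked example (hypothetical numbers, not from the original
    * code), assuming 144 data REs per PRB and QPSK (modulation order 2):
    *   nPrb = 4 -> totREs = 4 * 144 = 576
    *   tbSz = 600 bits, betaOffVal = 2500 (2.5 in parts per 1000),
    *   cqiRiRptSz = 100 bits, hqSz = 0:
    *     cqiRiREs = (576 * 2500 * 100) / (1000 * 600) = 240, hqREs = 0
    *     remREsForPusch = 576 - 240 = 336
    *     bitsPerRe = (600 * 1000) / 336 = 1785, puschEff = 1785 / 2 = 892
    * With a hypothetical effTgt of 930 (a 0.93 coding-rate cap expressed in
    * parts per 1000), 892 < 930, so the efficiency target is met and the loop
    * above terminates; otherwise it alternately adds a subband or steps the
    * iTbs down and re-evaluates. */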
13919 /***********************************************************
13921 * Func : rgSCHCmnUlRbAllocForUe
13923 * Desc : Do uplink RB allocation for an UE.
13927 * Notes: Note that as of now, for retx, maxRb
13928 * is not considered. Alternatives, such
13929 * as dropping retx if it crosses maxRb
13930 * could be considered.
13934 **********************************************************/
13936 PRIVATE S16 rgSCHCmnUlRbAllocForUe
13945 PRIVATE S16 rgSCHCmnUlRbAllocForUe(cell, sf, ue, maxRb, hole)
13953 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
13954 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
13955 RgSchUlAlloc *alloc = NULLP;
13961 RgSchUlHqProcCb *proc = &ueUl->hqEnt.hqProcCb[cellUl->schdHqProcIdx];
13963 RgSchUlHqProcCb *proc = NULLP;
13969 TfuDciFormat dciFrmt;
13973 TRC2(rgSCHCmnUlRbAllocForUe);
13975 rgSCHUhmGetAvlHqProc(cell, ue, &proc);
13978 //printf("UE [%d] HQ Proc unavailable\n", ue->ueId);
13983 if (ue->ue5gtfCb.rank == 2)
13985 dciFrmt = TFU_DCI_FORMAT_A2;
13990 dciFrmt = TFU_DCI_FORMAT_A1;
13993 /* 5gtf TODO : To pass dci frmt to this function */
13994 pdcch = rgSCHCmnPdcchAllocCrntSf(cell, ue);
13997 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
13998 "rgSCHCmnUlRbAllocForUe(): Could not get PDCCH for CRNTI:%d",ue->ueId);
14001 gUl5gtfPdcchSchd++;
14002 #if defined (TENB_STATS) && defined (RG_5GTF)
14003 cell->tenbStats->sch.ul5gtfPdcchSchd++;
14006 //TODO_SID using configured prb as of now
14007 nPrb = ue->ue5gtfCb.maxPrb;
14008 reqVrbg = nPrb/MAX_5GTF_VRBG_SIZE;
14009 iMcs = ue->ue5gtfCb.mcs; //gSCHCmnUlGetIMcsFrmITbs(iTbs,ueCtg);
14013 if((sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart > MAX_5GTF_VRBG)
14014 || (sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated > MAX_5GTF_VRBG))
14016 printf("5GTF_ERROR vrbg > 25 valstart = %d valalloc %d\n", sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart
14017 , sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated);
14022 /*TODO_SID: Workaround for alloc. Currently alloc is ulsf based. To handle multiple beams, we need a different
14023 design. Allocations are currently formed based on the MAX_5GTF_UE_SCH macro. */
14024 numVrbgTemp = MAX_5GTF_VRBG/MAX_5GTF_UE_SCH;
14027 alloc = rgSCHCmnUlSbAlloc(sf, numVrbgTemp,\
14030 if (alloc == NULLP)
14032 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
14033 "rgSCHCmnUlRbAllocForUe(): Could not get UlAlloc %d CRNTI:%d",numVrbg,ue->ueId);
14034 rgSCHCmnPdcchRlsCrntSf(cell, pdcch);
14037 gUl5gtfAllocAllocated++;
14038 #if defined (TENB_STATS) && defined (RG_5GTF)
14039 cell->tenbStats->sch.ul5gtfAllocAllocated++;
14041 alloc->grnt.vrbgStart = sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart;
14042 alloc->grnt.numVrbg = numVrbg;
14043 alloc->grnt.numLyr = numLyr;
14044 alloc->grnt.dciFrmt = dciFrmt;
14046 sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart += numVrbg;
14047 sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated += numVrbg;
14049 //rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
14051 sf->totPrb += alloc->grnt.numRb;
14052 ue->ul.nPrb = alloc->grnt.numRb;
14054 if (ue->csgMmbrSta != TRUE)
14056 cellUl->ncsgPrbCnt += alloc->grnt.numRb;
14058 cellUl->totPrbCnt += (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
14059 alloc->pdcch = pdcch;
14060 alloc->grnt.iMcs = iMcs;
14061 alloc->grnt.iMcsCrnt = iMcsCrnt;
14062 alloc->grnt.hop = 0;
14063 /* Initial Num RBs support for UCI on PUSCH */
14065 ue->initNumRbs = (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
14067 alloc->forMsg3 = FALSE;
14068 //RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTb5gtfSzTbl[0], (iTbs));
14070 //ueUl->alloc.allocdBytes = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
14071 /* TODO_SID Allocating based on configured MCS as of now.
14072 Currently for format A2. When doing multi grp per tti, need to update this. */
14073 ueUl->alloc.allocdBytes = (rgSch5gtfTbSzTbl[iMcs]/8) * ue->ue5gtfCb.rank;
14075 alloc->grnt.datSz = ueUl->alloc.allocdBytes;
14076 //TODO_SID Need to check mod order.
14077 RG_SCH_CMN_TBS_TO_MODODR(iMcs, alloc->grnt.modOdr);
14078 //alloc->grnt.modOdr = 6;
14079 alloc->grnt.isRtx = FALSE;
14081 alloc->grnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG, alloc->grnt.vrbgStart, alloc->grnt.numVrbg);
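   /* Illustrative note (assumes rgSCHCmnCalcRiv() implements the standard LTE
    * type-2 resource indication value, RIV = N*(L-1) + S for (L-1) <= N/2):
    * with N = MAX_5GTF_VRBG = 25 (see the range check above), vrbgStart = 0 and
    * numVrbg = 5, this would yield rbAssign = 25*4 + 0 = 100. */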
14082 alloc->grnt.SCID = 0;
14083 alloc->grnt.xPUSCHRange = MAX_5GTF_XPUSCH_RANGE;
14084 alloc->grnt.PMI = 0;
14085 alloc->grnt.uciOnxPUSCH = 0;
14086 alloc->grnt.hqProcId = proc->procId;
14088 alloc->hqProc = proc;
14089 alloc->hqProc->ulSfIdx = cellUl->schdIdx;
14091 /*commenting to retain the rnti used for transmission SPS/c-rnti */
14092 alloc->rnti = ue->ueId;
14093 ueUl->alloc.alloc = alloc;
14094 /*rntiwari-Adding the debug for generating the graph.*/
14095 /* No grant attr recorded now */
14099 /***********************************************************
14101 * Func : rgSCHCmnUlRbAllocAddUeToLst
14103 * Desc : Add UE to list (scheduled/non-scheduled list)
14104 * for UL RB allocation information.
14112 **********************************************************/
14114 PUBLIC Void rgSCHCmnUlRbAllocAddUeToLst
14121 PUBLIC Void rgSCHCmnUlRbAllocAddUeToLst(cell, ue, lst)
14127 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
14128 TRC2(rgSCHCmnUlRbAllocAddUeToLst);
14131 gUl5gtfUeRbAllocDone++;
14132 #if defined (TENB_STATS) && defined (RG_5GTF)
14133 cell->tenbStats->sch.ul5gtfUeRbAllocDone++;
14135 cmLListAdd2Tail(lst, &ueUl->alloc.schdLstLnk);
14136 ueUl->alloc.schdLstLnk.node = (PTR)ue;
14141 * @brief This function Processes the Final Allocations
14142 * made by the RB Allocator against the requested allocations.
14146 * Function: rgSCHCmnUlAllocFnlz
14147 * Purpose: This function Processes the Final Allocations
14148 * made by the RB Allocator against the requested allocations.
14150 * Invoked by: Common Scheduler
14152 * @param[in] RgSchCellCb *cell
14153 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
14158 PRIVATE Void rgSCHCmnUlAllocFnlz
14161 RgSchCmnUlRbAllocInfo *allocInfo
14164 PRIVATE Void rgSCHCmnUlAllocFnlz(cell, allocInfo)
14166 RgSchCmnUlRbAllocInfo *allocInfo;
14169 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14170 TRC2(rgSCHCmnUlAllocFnlz);
14172 /* call scheduler specific Finalization */
14173 cellSch->apisUl->rgSCHUlAllocFnlz(cell, allocInfo);
14179 * @brief This function Processes the Final Allocations
14180 * made by the RB Allocator against the requested allocations.
14184 * Function: rgSCHCmnDlAllocFnlz
14185 * Purpose: This function Processes the Final Allocations
14186 * made by the RB Allocator against the requested allocations.
14188 * Invoked by: Common Scheduler
14190 * @param[in] RgSchCellCb *cell
14195 PUBLIC Void rgSCHCmnDlAllocFnlz
14200 PUBLIC Void rgSCHCmnDlAllocFnlz(cell)
14204 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14205 RgSchCmnDlRbAllocInfo *allocInfo = &cellSch->allocInfo;
14207 TRC2(rgSCHCmnDlAllocFnlz);
14209 rgSCHCmnDlCcchRetxFnlz(cell, allocInfo);
14210 rgSCHCmnDlCcchTxFnlz(cell, allocInfo);
14212 /* Added below functions for handling CCCH SDU transmission received
14214 * after guard timer expiry */
14215 rgSCHCmnDlCcchSduRetxFnlz(cell, allocInfo);
14216 rgSCHCmnDlCcchSduTxFnlz(cell, allocInfo);
14218 rgSCHCmnDlRaRspFnlz(cell, allocInfo);
14219 /* call scheduler specific Finalization */
14220 cellSch->apisDl->rgSCHDlAllocFnlz(cell, allocInfo);
14222 /* Stack Crash problem for TRACE5 Changes. Added the return below */
14229 * @brief Update an uplink subframe.
14233 * Function : rgSCHCmnUlUpdSf
14235 * For each allocation
14236 * - if no more tx needed
14237 * - Release allocation
14239 * - Perform retransmission
14241 * @param[in] RgSchUlSf *sf
14245 PRIVATE Void rgSCHCmnUlUpdSf
14248 RgSchCmnUlRbAllocInfo *allocInfo,
14252 PRIVATE Void rgSCHCmnUlUpdSf(cell, allocInfo, sf)
14254 RgSchCmnUlRbAllocInfo *allocInfo;
14259 TRC2(rgSCHCmnUlUpdSf);
14261 while ((lnk = sf->allocs.first))
14263 RgSchUlAlloc *alloc = (RgSchUlAlloc *)lnk->node;
14266 if ((alloc->hqProc->rcvdCrcInd) || (alloc->hqProc->remTx == 0))
14271 /* If need to handle all retx together, run another loop separately */
14272 rgSCHCmnUlHndlAllocRetx(cell, allocInfo, sf, alloc);
14274 rgSCHCmnUlRlsUlAlloc(cell, sf, alloc);
14277 /* By this time, all allocs would have been cleared and
14278 * SF is reset to be made ready for new allocations. */
14279 rgSCHCmnUlSfReset(cell, sf);
14280 /* In case there are timing problems due to msg3
14281 * allocations being done in advance, (which will
14282 * probably happen with the current FDD code that
14283 * handles 8 subframes) one solution
14284 * could be to hold the (recent) msg3 allocs in a separate
14285 * list, and then possibly add that to the actual
14286 * list later. So at this time while allocations are
14287 * traversed, the recent msg3 ones are not seen. Anytime after
14288 * this (a good time is when the usual allocations
14289 * are made), msg3 allocations could be transferred to the
14290 * normal list. Not doing this now as it is assumed
14291 * that incorporation of TDD shall take care of this.
14299 * @brief Handle uplink allocation for retransmission.
14303 * Function : rgSCHCmnUlHndlAllocRetx
14305 * Processing Steps:
14306 * - Add to queue for retx.
14307 * - Do not release here, release happens as part
14308 * of the loop that calls this function.
14310 * @param[in] RgSchCellCb *cell
14311 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
14312 * @param[in] RgSchUlSf *sf
14313 * @param[in] RgSchUlAlloc *alloc
14317 PRIVATE Void rgSCHCmnUlHndlAllocRetx
14320 RgSchCmnUlRbAllocInfo *allocInfo,
14322 RgSchUlAlloc *alloc
14325 PRIVATE Void rgSCHCmnUlHndlAllocRetx(cell, allocInfo, sf, alloc)
14327 RgSchCmnUlRbAllocInfo *allocInfo;
14329 RgSchUlAlloc *alloc;
14333 RgSchCmnUlUe *ueUl;
14334 TRC2(rgSCHCmnUlHndlAllocRetx);
14336 rgTbSzTbl[0][rgSCHCmnUlGetITbsFrmIMcs(alloc->grnt.iMcs)]\
14337 [alloc->grnt.numRb-1]/8;
14338 if (!alloc->forMsg3)
14340 ueUl = RG_SCH_CMN_GET_UL_UE(alloc->ue);
14341 ueUl->alloc.reqBytes = bytes;
14342 rgSCHUhmRetx(alloc->hqProc);
14343 rgSCHCmnUlAdd2RetxUeLst(allocInfo, alloc->ue);
14347 /* RACHO msg3 retx handling. Part of RACH procedure changes. */
14348 retxAlloc = rgSCHCmnUlGetUlAlloc(cell, sf, alloc->numSb);
14349 if (retxAlloc == NULLP)
14351 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
14352 "rgSCHCmnUlRbAllocForUe():Could not get UlAlloc for msg3Retx RNTI:%d",
14356 retxAlloc->grnt.iMcs = alloc->grnt.iMcs;
14357 retxAlloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl\
14358 [alloc->hqProc->rvIdx];
14359 retxAlloc->grnt.nDmrs = 0;
14360 retxAlloc->grnt.hop = 0;
14361 retxAlloc->grnt.delayBit = 0;
14362 retxAlloc->rnti = alloc->rnti;
14363 retxAlloc->ue = NULLP;
14364 retxAlloc->pdcch = FALSE;
14365 retxAlloc->forMsg3 = TRUE;
14366 retxAlloc->raCb = alloc->raCb;
14367 retxAlloc->hqProc = alloc->hqProc;
14368 rgSCHUhmRetx(retxAlloc->hqProc);
14375 * @brief Uplink Scheduling Handler.
14379 * Function: rgSCHCmnUlAlloc
14380 * Purpose: This function Handles Uplink Scheduling.
14382 * Invoked by: Common Scheduler
14384 * @param[in] RgSchCellCb *cell
14387 /* ccpu00132653- The definition of this function made common for TDD and FDD*/
14389 PRIVATE Void rgSCHCmnUlAlloc
14394 PRIVATE Void rgSCHCmnUlAlloc(cell)
14398 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14399 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
14400 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
14401 RgSchCmnUlRbAllocInfo allocInfo;
14402 RgSchCmnUlRbAllocInfo *allocInfoRef = &allocInfo;
14408 TRC2(rgSCHCmnUlAlloc);
14410 /* Initializing RgSchCmnUlRbAllocInfo structure */
14411 rgSCHCmnInitUlRbAllocInfo(allocInfoRef);
14413 /* Get Uplink Subframe */
14414 allocInfoRef->sf = &cellUl->ulSfArr[cellUl->schdIdx];
14416 /* initializing the UL PRB count */
14417 allocInfoRef->sf->totPrb = 0;
14421 rgSCHCmnSpsUlTti(cell, allocInfoRef);
14424 if(*allocInfoRef->sf->allocCountRef == 0)
14428 if ((hole = rgSCHUtlUlHoleFirst(allocInfoRef->sf)) != NULLP)
14430 /* Sanity check of holeDb */
14431 if (allocInfoRef->sf->holeDb->count == 1 && hole->start == 0)
14433 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
14434 /* Re-Initialize available subbands because of CFI change*/
14435 allocInfoRef->sf->availSubbands = cell->dynCfiCb.\
14436 bwInfo[cellDl->currCfi].numSb;
14437 /* Currently doing 5GTF UL SF specific initialization here;
14438 it needs to be moved to the proper place. */
14440 allocInfoRef->sf->numGrpPerTti = cell->cell5gtfCb.ueGrpPerTti;
14441 allocInfoRef->sf->numUePerGrp = cell->cell5gtfCb.uePerGrpPerTti;
14442 for(idx = 0; idx < MAX_5GTF_BEAMS; idx++)
14444 allocInfoRef->sf->sfBeamInfo[idx].totVrbgAllocated = 0;
14445 allocInfoRef->sf->sfBeamInfo[idx].totVrbgRequired = 0;
14446 allocInfoRef->sf->sfBeamInfo[idx].vrbgStart = 0;
14452 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
14453 "Error! holeDb sanity check failed");
14458 /* Fix: Adaptive re-transmissions prioritised over other transmissions */
14459 /* perform adaptive retransmissions */
14460 rgSCHCmnUlSfReTxAllocs(cell, allocInfoRef->sf);
14464 /* Fix: syed Adaptive Msg3 Retx crash. Release all
14465 Harq processes for which adap Retx failed, to avoid
14466 blocking. This step should be done before New TX
14467 scheduling to make hqProc available. Right now we
14468 don't check if proc is in adap Retx list for considering
14469 it to be available. But now with this release that
14470 functionality would be correct. */
14472 rgSCHCmnUlSfRlsRetxProcs(cell, allocInfoRef->sf);
14475 /* Specific UL scheduler to perform UE scheduling */
14476 cellSch->apisUl->rgSCHUlSched(cell, allocInfoRef);
14478 /* Call UL RB allocator module */
14479 rgSCHCmnAllocUlRb(cell, allocInfoRef);
14481 /* Do group power control for PUSCH */
14482 rgSCHCmnGrpPwrCntrlPusch(cell, allocInfoRef->sf);
14484 cell->sc.apis->rgSCHDrxStrtInActvTmrInUl(cell);
14486 rgSCHCmnUlAllocFnlz(cell, allocInfoRef);
14487 if(5000 == g5gtfTtiCnt)
14489 ul5gtfsidDlAlreadyMarkUl = 0;
14490 ul5gtfsidDlSchdPass = 0;
14491 ul5gtfsidUlMarkUl = 0;
14492 ul5gtfTotSchdCnt = 0;
14500 * @brief send Subframe Allocations.
14504 * Function: rgSCHCmnSndCnsldtInfo
14505 * Purpose: Send the scheduled
14506 * allocations to MAC for StaInd generation to Higher layers and
14507 * for MUXing. Posts RgInfSfAlloc to the MAC instance.
14509 * Invoked by: Common Scheduler
14511 * @param[in] RgSchCellCb *cell
14515 PUBLIC Void rgSCHCmnSndCnsldtInfo
14520 PUBLIC Void rgSCHCmnSndCnsldtInfo(cell)
14524 RgInfSfAlloc *subfrmAlloc;
14526 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14528 TRC2(rgSCHCmnSndCnsldtInfo);
14530 subfrmAlloc = &(cell->sfAllocArr[cell->crntSfIdx]);
14532 /* Send the allocations to MAC for MUXing */
14533 rgSCHUtlGetPstToLyr(&pst, &rgSchCb[cell->instIdx], cell->macInst);
14534 subfrmAlloc->cellId = cell->cellId;
14535 /* Populate the List of UEs needing PDB-based Flow control */
14536 cellSch->apisDl->rgSCHDlFillFlwCtrlInfo(cell, subfrmAlloc);
14538 if((subfrmAlloc->rarInfo.numRaRntis) ||
14540 (subfrmAlloc->emtcInfo.rarInfo.numRaRntis) ||
14541 (subfrmAlloc->emtcInfo.cmnLcInfo.bitMask) ||
14542 (subfrmAlloc->emtcInfo.ueInfo.numUes) ||
14544 (subfrmAlloc->ueInfo.numUes) ||
14545 (subfrmAlloc->cmnLcInfo.bitMask) ||
14546 (subfrmAlloc->ulUeInfo.numUes) ||
14547 (subfrmAlloc->flowCntrlInfo.numUes))
14549 if((subfrmAlloc->rarInfo.numRaRntis) ||
14551 (subfrmAlloc->emtcInfo.rarInfo.numRaRntis) ||
14552 (subfrmAlloc->emtcInfo.cmnLcInfo.bitMask) ||
14553 (subfrmAlloc->emtcInfo.ueInfo.numUes) ||
14555 (subfrmAlloc->ueInfo.numUes) ||
14556 (subfrmAlloc->cmnLcInfo.bitMask) ||
14557 (subfrmAlloc->flowCntrlInfo.numUes))
14560 RgSchMacSfAlloc(&pst, subfrmAlloc);
14563 cell->crntSfIdx = (cell->crntSfIdx + 1) % RGSCH_NUM_SUB_FRAMES;
14565 cell->crntSfIdx = (cell->crntSfIdx + 1) % RGSCH_SF_ALLOC_SIZE;
14571 * @brief Consolidate Subframe Allocations.
14575 * Function: rgSCHCmnCnsldtSfAlloc
14576 * Purpose: Consolidate Subframe Allocations.
14578 * Invoked by: Common Scheduler
14580 * @param[in] RgSchCellCb *cell
14584 PUBLIC Void rgSCHCmnCnsldtSfAlloc
14589 PUBLIC Void rgSCHCmnCnsldtSfAlloc(cell)
14593 RgInfSfAlloc *subfrmAlloc;
14594 CmLteTimingInfo frm;
14596 CmLListCp dlDrxInactvTmrLst;
14597 CmLListCp dlInActvLst;
14598 CmLListCp ulInActvLst;
14599 RgSchCmnCell *cellSch = NULLP;
14601 TRC2(rgSCHCmnCnsldtSfAlloc);
14603 cmLListInit(&dlDrxInactvTmrLst);
14604 cmLListInit(&dlInActvLst);
14605 cmLListInit(&ulInActvLst);
14607 subfrmAlloc = &(cell->sfAllocArr[cell->crntSfIdx]);
14609 /* Get Downlink Subframe */
14610 frm = cell->crntTime;
14611 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
14612 dlSf = rgSCHUtlSubFrmGet(cell, frm);
14614 /* Fill the allocation Info */
14615 rgSCHUtlFillRgInfRarInfo(dlSf, subfrmAlloc, cell);
14618 rgSCHUtlFillRgInfUeInfo(dlSf, cell, &dlDrxInactvTmrLst,
14619 &dlInActvLst, &ulInActvLst);
14620 #ifdef RG_PFS_STATS
14621 cell->totalPrb += dlSf->bwAssigned;
14623 /* Mark the following Ues inactive for UL*/
14624 cellSch = RG_SCH_CMN_GET_CELL(cell);
14626 /* Calling Scheduler specific function with DRX inactive UE list*/
14627 cellSch->apisUl->rgSCHUlInactvtUes(cell, &ulInActvLst);
14628 cellSch->apisDl->rgSCHDlInactvtUes(cell, &dlInActvLst);
14631 /*re/start DRX inactivity timer for the UEs*/
14632 (Void)rgSCHDrxStrtInActvTmr(cell,&dlDrxInactvTmrLst,RG_SCH_DRX_DL);
14638 * @brief Initialize the DL Allocation Information Structure.
14642 * Function: rgSCHCmnInitDlRbAllocInfo
14643 * Purpose: Initialize the DL Allocation Information Structure.
14645 * Invoked by: Common Scheduler
14647 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
14651 PRIVATE Void rgSCHCmnInitDlRbAllocInfo
14653 RgSchCmnDlRbAllocInfo *allocInfo
14656 PRIVATE Void rgSCHCmnInitDlRbAllocInfo(allocInfo)
14657 RgSchCmnDlRbAllocInfo *allocInfo;
14660 TRC2(rgSCHCmnInitDlRbAllocInfo);
14661 cmMemset((U8 *)&allocInfo->pcchAlloc, (U8)0, sizeof(RgSchDlRbAlloc));
14662 cmMemset((U8 *)&allocInfo->bcchAlloc, (U8)0, sizeof(RgSchDlRbAlloc));
14663 cmMemset((U8 *)allocInfo->raRspAlloc, (U8)0,
14664 RG_SCH_CMN_MAX_CMN_PDCCH*sizeof(RgSchDlRbAlloc));
14666 allocInfo->msg4Alloc.msg4DlSf = NULLP;
14667 cmLListInit(&allocInfo->msg4Alloc.msg4TxLst);
14668 cmLListInit(&allocInfo->msg4Alloc.msg4RetxLst);
14669 cmLListInit(&allocInfo->msg4Alloc.schdMsg4TxLst);
14670 cmLListInit(&allocInfo->msg4Alloc.schdMsg4RetxLst);
14671 cmLListInit(&allocInfo->msg4Alloc.nonSchdMsg4TxLst);
14672 cmLListInit(&allocInfo->msg4Alloc.nonSchdMsg4RetxLst);
14674 allocInfo->ccchSduAlloc.ccchSduDlSf = NULLP;
14675 cmLListInit(&allocInfo->ccchSduAlloc.ccchSduTxLst);
14676 cmLListInit(&allocInfo->ccchSduAlloc.ccchSduRetxLst);
14677 cmLListInit(&allocInfo->ccchSduAlloc.schdCcchSduTxLst);
14678 cmLListInit(&allocInfo->ccchSduAlloc.schdCcchSduRetxLst);
14679 cmLListInit(&allocInfo->ccchSduAlloc.nonSchdCcchSduTxLst);
14680 cmLListInit(&allocInfo->ccchSduAlloc.nonSchdCcchSduRetxLst);
14683 allocInfo->dedAlloc.dedDlSf = NULLP;
14684 cmLListInit(&allocInfo->dedAlloc.txHqPLst);
14685 cmLListInit(&allocInfo->dedAlloc.retxHqPLst);
14686 cmLListInit(&allocInfo->dedAlloc.schdTxHqPLst);
14687 cmLListInit(&allocInfo->dedAlloc.schdRetxHqPLst);
14688 cmLListInit(&allocInfo->dedAlloc.nonSchdTxHqPLst);
14689 cmLListInit(&allocInfo->dedAlloc.nonSchdRetxHqPLst);
14691 cmLListInit(&allocInfo->dedAlloc.txRetxHqPLst);
14692 cmLListInit(&allocInfo->dedAlloc.schdTxRetxHqPLst);
14693 cmLListInit(&allocInfo->dedAlloc.nonSchdTxRetxHqPLst);
14695 cmLListInit(&allocInfo->dedAlloc.txSpsHqPLst);
14696 cmLListInit(&allocInfo->dedAlloc.retxSpsHqPLst);
14697 cmLListInit(&allocInfo->dedAlloc.schdTxSpsHqPLst);
14698 cmLListInit(&allocInfo->dedAlloc.schdRetxSpsHqPLst);
14699 cmLListInit(&allocInfo->dedAlloc.nonSchdTxSpsHqPLst);
14700 cmLListInit(&allocInfo->dedAlloc.nonSchdRetxSpsHqPLst);
14704 rgSCHLaaCmnInitDlRbAllocInfo (allocInfo);
14707 cmLListInit(&allocInfo->dedAlloc.errIndTxHqPLst);
14708 cmLListInit(&allocInfo->dedAlloc.schdErrIndTxHqPLst);
14709 cmLListInit(&allocInfo->dedAlloc.nonSchdErrIndTxHqPLst);
14714 * @brief Initialize the UL Allocation Information Structure.
14718 * Function: rgSCHCmnInitUlRbAllocInfo
14719 * Purpose: Initialize the UL Allocation Information Structure.
14721 * Invoked by: Common Scheduler
14723 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
14727 PUBLIC Void rgSCHCmnInitUlRbAllocInfo
14729 RgSchCmnUlRbAllocInfo *allocInfo
14732 PUBLIC Void rgSCHCmnInitUlRbAllocInfo(allocInfo)
14733 RgSchCmnUlRbAllocInfo *allocInfo;
14736 TRC2(rgSCHCmnInitUlRbAllocInfo);
14737 allocInfo->sf = NULLP;
14738 cmLListInit(&allocInfo->contResLst);
14739 cmLListInit(&allocInfo->schdContResLst);
14740 cmLListInit(&allocInfo->nonSchdContResLst);
14741 cmLListInit(&allocInfo->ueLst);
14742 cmLListInit(&allocInfo->schdUeLst);
14743 cmLListInit(&allocInfo->nonSchdUeLst);
14749 * @brief Scheduling for PUCCH group power control.
14753 * Function: rgSCHCmnGrpPwrCntrlPucch
14754 * Purpose: This function does group power control for PUCCH
14755 * corresponding to the subframe for which DL UE allocations
14758 * Invoked by: Common Scheduler
14760 * @param[in] RgSchCellCb *cell
14764 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch
14770 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch(cell, dlSf)
14775 TRC2(rgSCHCmnGrpPwrCntrlPucch);
14777 rgSCHPwrGrpCntrlPucch(cell, dlSf);
14783 * @brief Scheduling for PUSCH group power control.
14787 * Function: rgSCHCmnGrpPwrCntrlPusch
14788 * Purpose: This function does group power control, for
14789 * the subframe for which UL allocation has (just) happened.
14791 * Invoked by: Common Scheduler
14793 * @param[in] RgSchCellCb *cell
14794 * @param[in] RgSchUlSf *ulSf
14798 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch
14804 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch(cell, ulSf)
14809 /*removed unused variable *cellSch*/
14810 CmLteTimingInfo frm;
14813 TRC2(rgSCHCmnGrpPwrCntrlPusch);
14815 /* Got to pass DL SF corresponding to UL SF, so get that first.
14816 * There is no easy way of getting dlSf by having the RgSchUlSf*,
14817 * so use the UL delta from current time to get the DL SF. */
14818 frm = cell->crntTime;
14821 if(cell->emtcEnable == TRUE)
14823 RGSCH_INCR_SUB_FRAME_EMTC(frm, TFU_DLCNTRL_DLDELTA);
14828 RGSCH_INCR_SUB_FRAME(frm, TFU_DLCNTRL_DLDELTA);
14830 /* Del filling of dl.time */
14831 dlSf = rgSCHUtlSubFrmGet(cell, frm);
14833 rgSCHPwrGrpCntrlPusch(cell, dlSf, ulSf);
14838 /* Fix: syed align multiple UEs to refresh at same time */
14839 /***********************************************************
14841 * Func : rgSCHCmnApplyUeRefresh
14843 * Desc : Apply UE refresh in CMN and Specific
14844 * schedulers. Data rates and corresponding
14845 * scratchpad variables are updated.
14853 **********************************************************/
14855 PRIVATE S16 rgSCHCmnApplyUeRefresh
14861 PRIVATE S16 rgSCHCmnApplyUeRefresh(cell, ue)
14866 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14868 U32 effNonGbrBsr = 0;
14871 TRC2(rgSCHCmnApplyUeRefresh);
14873 /* Reset the refresh cycle variables */
14874 ue->ul.effAmbr = ue->ul.cfgdAmbr;
14876 for (lcgId = 1; lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
14878 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
14880 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
14882 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
14884 cmnLcg->effGbr = cmnLcg->cfgdGbr;
14885 cmnLcg->effDeltaMbr = cmnLcg->deltaMbr;
14886 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
14887 /* Considering GBR LCG will be prioritised by UE */
14888 effGbrBsr += cmnLcg->bs;
14889 }/* Else no remaining BS, so nonLcg0 will be updated when a BSR is received */
14892 effNonGbrBsr += cmnLcg->reportedBs;
14893 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
14897 effNonGbrBsr = RGSCH_MIN(effNonGbrBsr,ue->ul.effAmbr);
14898 ue->ul.nonGbrLcgBs = effNonGbrBsr;
14900 ue->ul.nonLcg0Bs = effGbrBsr + effNonGbrBsr;
14901 ue->ul.effBsr = ue->ul.nonLcg0Bs +\
14902 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
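   /* Illustrative worked example (hypothetical numbers, not from the original
    * code): suppose LCG1 is a GBR LCG with effGbr + effDeltaMbr = 2000 bytes and
    * reportedBs = 5000, LCG2 is non-GBR with reportedBs = 3000, LCG0 has
    * bs = 100 and ue->ul.effAmbr = 1500. Then
    *   LCG1: bs = min(5000, 2000) = 2000             -> effGbrBsr = 2000
    *   LCG2: effNonGbrBsr += 3000; bs = min(3000, 1500) = 1500
    *   effNonGbrBsr is then capped: min(3000, 1500)  = 1500
    *   nonLcg0Bs = 2000 + 1500 = 3500, effBsr = 3500 + 100 = 3600 bytes. */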
14905 /* call scheduler specific event handlers
14906 * for refresh timer expiry */
14907 cellSch->apisUl->rgSCHUlUeRefresh(cell, ue);
14908 cellSch->apisDl->rgSCHDlUeRefresh(cell, ue);
14913 /***********************************************************
14915 * Func : rgSCHCmnTmrExpiry
14917 * Desc : Adds an UE to refresh queue, so that the UE is
14918 * periodically triggered to refresh its GBR and AMBR values.
14927 **********************************************************/
14929 PRIVATE S16 rgSCHCmnTmrExpiry
14931 PTR cb, /* Pointer to timer control block */
14932 S16 tmrEvnt /* Timer Event */
14935 PRIVATE S16 rgSCHCmnTmrExpiry(cb, tmrEvnt)
14936 PTR cb; /* Pointer to timer control block */
14937 S16 tmrEvnt; /* Timer Event */
14940 RgSchUeCb *ue = (RgSchUeCb *)cb;
14941 RgSchCellCb *cell = ue->cell;
14942 #if (ERRCLASS & ERRCLS_DEBUG)
14945 TRC2(rgSCHCmnTmrExpiry);
14947 #if (ERRCLASS & ERRCLS_DEBUG)
14948 if (tmrEvnt != RG_SCH_CMN_EVNT_UE_REFRESH)
14950 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnTmrExpiry(): Invalid "
14951 "timer event CRNTI:%d",ue->ueId);
14958 rgSCHCmnApplyUeRefresh(cell, ue);
14960 rgSCHCmnAddUeToRefreshQ(cell, ue, RG_SCH_CMN_REFRESH_TIME);
14965 /***********************************************************
14967 * Func : rgSCHCmnTmrProc
14969 * Desc : Timer entry point per cell. Timer
14970 * processing is triggered at every frame boundary
14979 **********************************************************/
14981 PRIVATE S16 rgSCHCmnTmrProc
14986 PRIVATE S16 rgSCHCmnTmrProc(cell)
14990 RgSchCmnDlCell *cmnDlCell = RG_SCH_CMN_GET_DL_CELL(cell);
14991 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
14992 /* Moving the assignment of scheduler pointer
14993 to available scope for optimization */
14994 TRC2(rgSCHCmnTmrProc);
14996 if ((cell->crntTime.slot % RGSCH_NUM_SUB_FRAMES_5G) == 0)
14998 /* Reset the counters periodically */
14999 if ((cell->crntTime.sfn % RG_SCH_CMN_CSG_REFRESH_TIME) == 0)
15001 RG_SCH_RESET_HCSG_DL_PRB_CNTR(cmnDlCell);
15002 RG_SCH_RESET_HCSG_UL_PRB_CNTR(cmnUlCell);
15004 if ((cell->crntTime.sfn % RG_SCH_CMN_OVRLDCTRL_REFRESH_TIME) == 0)
15007 cell->measurements.ulTpt = ((cell->measurements.ulTpt * 95) + ( cell->measurements.ulBytesCnt * 5))/100;
15008 cell->measurements.dlTpt = ((cell->measurements.dlTpt * 95) + ( cell->measurements.dlBytesCnt * 5))/100;
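   /* Illustrative worked example (hypothetical numbers): the two lines above are
    * a 95/5 exponential moving average taken once per overload-control refresh
    * interval. If the previous ulTpt was 10000 bytes and ulBytesCnt for this
    * interval is 20000 bytes, the new ulTpt is
    *   (10000 * 95 + 20000 * 5) / 100 = 10500 bytes,
    * so one busy interval moves the long-term figure only slightly. */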
15010 rgSCHUtlCpuOvrLdAdjItbsCap(cell);
15011 /* reset cell level tpt measurements for next cycle */
15012 cell->measurements.ulBytesCnt = 0;
15013 cell->measurements.dlBytesCnt = 0;
15015 /* Comparing with Zero instead of % is being done for efficiency.
15016 * If Timer resolution changes then accordingly update the
15017 * macro RG_SCH_CMN_REFRESH_TIMERES */
15018 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
15019 cmPrcTmr(&sched->tmrTqCp, sched->tmrTq, (PFV)rgSCHCmnTmrExpiry);
15026 /***********************************************************
15028 * Func : rgSchCmnUpdCfiVal
15030 * Desc : Update the CFI value if CFI switch was done
15038 **********************************************************/
15040 PRIVATE Void rgSchCmnUpdCfiVal
15046 PRIVATE Void rgSchCmnUpdCfiVal(cell, delta)
15052 CmLteTimingInfo pdsch;
15053 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
15062 TRC2(rgSchCmnUpdCfiVal);
15064 pdsch = cell->crntTime;
15065 RGSCH_INCR_SUB_FRAME(pdsch, delta);
15066 dlSf = rgSCHUtlSubFrmGet(cell, pdsch);
15067 /* Fix for DCFI FLE issue: when DL delta is 1 and UL delta is 0 and CFI
15068 *change happens in that SF then UL PDCCH allocation happens with old CFI
15069 *but CFI in control Req goes updated one since it was stored in the CELL
15071 dlSf->pdcchInfo.currCfi = cellCmnDl->currCfi;
15072 if(cell->dynCfiCb.pdcchSfIdx != 0xFF)
15075 dlIdx = rgSCHUtlGetDlSfIdx(cell, &pdsch);
15077 dlIdx = (((pdsch.sfn & 1) * RGSCH_NUM_SUB_FRAMES) + (pdsch.slot % RGSCH_NUM_SUB_FRAMES));
15078 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, cell->subFrms, dlIdx);
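   /* Illustrative worked example (assuming RGSCH_NUM_SUB_FRAMES is 10): the
    * expression above yields indices 0..19, distinguishing even and odd SFNs.
    * For pdsch.sfn = 101 (odd) and pdsch.slot = 3, dlIdx = (1 * 10) + 3 = 13. */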
15080 /* If current downlink subframe index is same as pdcch SF index,
15081 * perform the switching of CFI in this subframe */
15082 if(cell->dynCfiCb.pdcchSfIdx == dlIdx)
15084 cellCmnDl->currCfi = cellCmnDl->newCfi;
15085 cell->dynCfiCb.pdcchSfIdx = 0xFF;
15087 /* Updating the nCce value based on the new CFI */
15089 splSfCfi = cellCmnDl->newCfi;
15090 for(idx = 0; idx < cell->numDlSubfrms; idx++)
15092 tddSf = cell->subFrms[idx];
15094 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][tddSf->sfNum];
15096 if(tddSf->sfType == RG_SCH_SPL_SF_DATA)
15098 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, splSfCfi);
15100 tddSf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][splSfCfi];
15104 tddSf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][cellCmnDl->currCfi];
15107 /* Setting the switch over window length based on config index.
15108 * During the switch over period all the UL transmissions are ACKed
15110 cell->dynCfiCb.switchOvrWinLen =
15111 rgSchCfiSwitchOvrWinLen[cell->ulDlCfgIdx];
15113 cell->nCce = cell->dynCfiCb.cfi2NCceTbl[0][cellCmnDl->currCfi];
15114 /* Fix for DCFI FLE issue: when DL delta is 1 and UL delta is 0 and CFI
15115 *change happens in that SF then UL PDCCH allocation happens with old CFI
15116 *but CFI in control Req goes updated one since it was stored in the CELL
15118 dlSf->pdcchInfo.currCfi = cellCmnDl->currCfi;
15119 cell->dynCfiCb.switchOvrWinLen = rgSchCfiSwitchOvrWinLen[7];
15127 /***********************************************************
15129 * Func : rgSchCmnUpdtPdcchSfIdx
15131 * Desc : Update the switch over window length
15139 **********************************************************/
15142 PRIVATE Void rgSchCmnUpdtPdcchSfIdx
15149 PRIVATE Void rgSchCmnUpdtPdcchSfIdx(cell, dlIdx, sfNum)
15156 PRIVATE Void rgSchCmnUpdtPdcchSfIdx
15162 PRIVATE Void rgSchCmnUpdtPdcchSfIdx(cell, dlIdx)
15170 TRC2(rgSchCmnUpdtPdcchSfIdx);
15172 /* Resetting the parameters on CFI switching */
15173 cell->dynCfiCb.cceUsed = 0;
15174 cell->dynCfiCb.lowCceCnt = 0;
15176 cell->dynCfiCb.cceFailSum = 0;
15177 cell->dynCfiCb.cceFailCnt = 0;
15178 cell->dynCfiCb.prevCceFailIdx = 0;
15180 cell->dynCfiCb.switchOvrInProgress = TRUE;
15182 for(idx = 0; idx < cell->dynCfiCb.numFailSamples; idx++)
15184 cell->dynCfiCb.cceFailSamples[idx] = 0;
15187 cell->dynCfiCb.ttiCnt = 0;
15189 cell->dynCfiCb.cfiSwitches++;
15190 cfiSwitchCnt = cell->dynCfiCb.cfiSwitches;
15193 cell->dynCfiCb.pdcchSfIdx = (dlIdx +
15194 rgSchTddPdcchSfIncTbl[cell->ulDlCfgIdx][sfNum]) % cell->numDlSubfrms;
15196 cell->dynCfiCb.pdcchSfIdx = (dlIdx + RG_SCH_CFI_APPLY_DELTA) % \
15197 RGSCH_NUM_DL_slotS;
15201 /***********************************************************
15203 * Func : rgSchCmnUpdCfiDb
15205 * Desc : Update the counters related to dynamic
15206 * CFI feature in cellCb.
15214 **********************************************************/
15216 PUBLIC Void rgSchCmnUpdCfiDb
15222 PUBLIC Void rgSchCmnUpdCfiDb(cell, delta)
15227 CmLteTimingInfo frm;
15233 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15234 U8 nCceLowerCfi = 0;
15241 TRC2(rgSchCmnUpdCfiDb);
15243 /* Get Downlink Subframe */
15244 frm = cell->crntTime;
15245 RGSCH_INCR_SUB_FRAME(frm, delta);
15248 dlIdx = rgSCHUtlGetDlSfIdx(cell, &frm);
15249 dlSf = cell->subFrms[dlIdx];
15250 isHiDci0 = rgSchTddPuschTxKTbl[cell->ulDlCfgIdx][dlSf->sfNum];
15252 /* Changing the indexing
15253 so that proper subframe is selected */
15254 dlIdx = (((frm.sfn & 1) * RGSCH_NUM_SUB_FRAMES) + (frm.slot % RGSCH_NUM_SUB_FRAMES));
15255 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, cell->subFrms, dlIdx);
15256 dlSf = cell->subFrms[dlIdx];
15259 currCfi = cellSch->dl.currCfi;
15261 if(!cell->dynCfiCb.switchOvrInProgress)
15264 if(!cell->dynCfiCb.isDynCfiEnb)
15266 if(currCfi != cellSch->cfiCfg.cfi)
15268 if(currCfi < cellSch->cfiCfg.cfi)
15270 RG_SCH_CFI_STEP_UP(cell, cellSch, currCfi)
15271 cfiIncr = cell->dynCfiCb.cfiIncr;
15275 RG_SCH_CFI_STEP_DOWN(cell, cellSch, currCfi)
15276 cfiDecr = cell->dynCfiCb.cfiDecr;
15283 /* Setting ttiMod to 0 for ttiCnt > 1000 in case this
15284 * function was not called in UL subframe*/
15285 if(cell->dynCfiCb.ttiCnt > RGSCH_CFI_TTI_MON_INTRVL)
15292 ttiMod = cell->dynCfiCb.ttiCnt % RGSCH_CFI_TTI_MON_INTRVL;
15295 dlSf->dlUlBothCmplt++;
15297 if((dlSf->dlUlBothCmplt == 2) || (!isHiDci0))
15299 if(dlSf->dlUlBothCmplt == 2)
15302 /********************STEP UP CRITERIA********************/
15303 /* Updating the CCE failure count parameter */
15304 cell->dynCfiCb.cceFailCnt += dlSf->isCceFailure;
15305 cell->dynCfiCb.cceFailSum += dlSf->isCceFailure;
15307 /* Check if cfi step up can be performed */
15308 if(currCfi < cell->dynCfiCb.maxCfi)
15310 if(cell->dynCfiCb.cceFailSum >= cell->dynCfiCb.cfiStepUpTtiCnt)
15312 RG_SCH_CFI_STEP_UP(cell, cellSch, currCfi)
15313 cfiIncr = cell->dynCfiCb.cfiIncr;
15318 /********************STEP DOWN CRITERIA********************/
15320 /* Updating the no. of CCE used in this dl subframe */
15321 cell->dynCfiCb.cceUsed += dlSf->cceCnt;
15323 if(currCfi > RGSCH_MIN_CFI_VAL)
15325 /* calculating the number of CCE for next lower CFI */
15327 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][dlSf->sfNum];
15328 nCceLowerCfi = cell->dynCfiCb.cfi2NCceTbl[mPhich][currCfi-1];
15330 nCceLowerCfi = cell->dynCfiCb.cfi2NCceTbl[0][currCfi-1];
15332 if(dlSf->cceCnt < nCceLowerCfi)
15334 /* Updating the count of TTIs in which no. of CCEs
15335 * used were less than the CCEs of next lower CFI */
15336 cell->dynCfiCb.lowCceCnt++;
15341 totalCce = (nCceLowerCfi * cell->dynCfiCb.cfiStepDownTtiCnt *
15342 RGSCH_CFI_CCE_PERCNTG)/100;
15344 if((!cell->dynCfiCb.cceFailSum) &&
15345 (cell->dynCfiCb.lowCceCnt >=
15346 cell->dynCfiCb.cfiStepDownTtiCnt) &&
15347 (cell->dynCfiCb.cceUsed < totalCce))
15349 RG_SCH_CFI_STEP_DOWN(cell, cellSch, currCfi)
15350 cfiDecr = cell->dynCfiCb.cfiDecr;
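   /* Illustrative worked example (hypothetical numbers; the configured values
    * are not shown in this file): with nCceLowerCfi = 20,
    * cfiStepDownTtiCnt = 100 and RGSCH_CFI_CCE_PERCNTG = 60,
    *   totalCce = (20 * 100 * 60) / 100 = 1200.
    * CFI is stepped down only if no CCE allocation failures were seen in the
    * window, at least 100 TTIs used fewer CCEs than the next lower CFI would
    * provide, and the CCEs used across the window stay below 1200. */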
15356 cceFailIdx = ttiMod/cell->dynCfiCb.failSamplePrd;
15358 if(cceFailIdx != cell->dynCfiCb.prevCceFailIdx)
15360 /* New sample period has started. Subtract the old count
15361 * from the new sample period */
15362 cell->dynCfiCb.cceFailSum -= cell->dynCfiCb.cceFailSamples[cceFailIdx];
15364 /* Store the previous sample period data */
15365 cell->dynCfiCb.cceFailSamples[cell->dynCfiCb.prevCceFailIdx]
15366 = cell->dynCfiCb.cceFailCnt;
15368 cell->dynCfiCb.prevCceFailIdx = cceFailIdx;
15370 /* Resetting the CCE failure count as zero for next sample period */
15371 cell->dynCfiCb.cceFailCnt = 0;
15376 /* Resetting the parameters after the monitoring interval expired */
15377 cell->dynCfiCb.cceUsed = 0;
15378 cell->dynCfiCb.lowCceCnt = 0;
15379 cell->dynCfiCb.ttiCnt = 0;
15382 cell->dynCfiCb.ttiCnt++;
15386 if(cellSch->dl.newCfi != cellSch->dl.currCfi)
15389 rgSchCmnUpdtPdcchSfIdx(cell, dlIdx, dlSf->sfNum);
15391 rgSchCmnUpdtPdcchSfIdx(cell, dlIdx);
15398 * @brief Dl Scheduler for Broadcast and Common channel scheduling.
15402 * Function: rgSCHCmnDlCommonChSch
15403 * Purpose: This function schedules DL Common channels for LTE.
15404 * Invoked by TTI processing in TOM. Scheduling is done for
15405 * BCCH, PCCH, Msg4, CCCH SDU, RAR in that order
15407 * Invoked by: TOM (TTI processing)
15409 * @param[in] RgSchCellCb *cell
15413 PUBLIC Void rgSCHCmnDlCommonChSch
15418 PUBLIC Void rgSCHCmnDlCommonChSch(cell)
15422 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15424 TRC2(rgSCHCmnDlCommonChSch);
15426 cellSch->apisDl->rgSCHDlTickForPdbTrkng(cell);
15427 rgSchCmnUpdCfiVal(cell, RG_SCH_CMN_DL_DELTA);
15429 /* handle Inactive UEs for DL */
15430 rgSCHCmnHdlDlInactUes(cell);
15432 /* Send a Tick to Refresh Timer */
15433 rgSCHCmnTmrProc(cell);
15435 if (cell->isDlDataAllwd && (cell->stopSiSch == FALSE))
15437 rgSCHCmnInitRbAlloc(cell);
15438 /* Perform DL scheduling of BCCH, PCCH */
15439 rgSCHCmnDlBcchPcchAlloc(cell);
15443 if(cell->siCb.inWindow != 0)
15445 cell->siCb.inWindow--;
15448 if (cell->isDlDataAllwd && (cell->stopDlSch == FALSE))
15450 rgSCHCmnDlCcchRarAlloc(cell);
15456 * @brief Scheduler invocation per TTI.
15460 * Function: rgSCHCmnUlSch
15461 * Purpose: This function implements UL scheduler alone. This is to
15462 * be able to perform scheduling with more flexibility.
15464 * Invoked by: TOM (TTI processing)
15466 * @param[in] RgSchCellCb *cell
15470 PUBLIC Void rgSCHCmnUlSch
15475 PUBLIC Void rgSCHCmnUlSch(cell)
15479 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15481 TRC2(rgSCHCmnUlSch);
15485 if(TRUE == rgSCHLaaSCellEnabled(cell))
15491 if(cellSch->ul.schdIdx != RGSCH_INVALID_INFO)
15493 rgSchCmnUpdCfiVal(cell, TFU_ULCNTRL_DLDELTA);
15495 /* Handle Inactive UEs for UL */
15496 rgSCHCmnHdlUlInactUes(cell);
15497 /* Perform UL Scheduling EVERY TTI */
15498 rgSCHCmnUlAlloc(cell);
15500 /* Calling function to update CFI parameters*/
15501 rgSchCmnUpdCfiDb(cell, TFU_ULCNTRL_DLDELTA);
15503 if(cell->dynCfiCb.switchOvrWinLen > 0)
15505 /* Decrementing the switchover window length */
15506 cell->dynCfiCb.switchOvrWinLen--;
15508 if(!cell->dynCfiCb.switchOvrWinLen)
15510 if(cell->dynCfiCb.dynCfiRecfgPend)
15512 /* Toggling the Dynamic CFI enabling */
15513 cell->dynCfiCb.isDynCfiEnb ^= 1;
15514 rgSCHDynCfiReCfg(cell, cell->dynCfiCb.isDynCfiEnb);
15515 cell->dynCfiCb.dynCfiRecfgPend = FALSE;
15517 cell->dynCfiCb.switchOvrInProgress = FALSE;
15525 rgSCHCmnSpsUlTti(cell, NULLP);
15535 * @brief This function updates the scheduler with service for an UE.
15539 * Function: rgSCHCmnDlDedBoUpd
15540 * Purpose: This function should be called whenever there is a
15541 * change BO for a service.
15543 * Invoked by: BO and Scheduler
15545 * @param[in] RgSchCellCb* cell
15546 * @param[in] RgSchUeCb* ue
15547 * @param[in] RgSchDlLcCb* svc
15552 PUBLIC Void rgSCHCmnDlDedBoUpd
15559 PUBLIC Void rgSCHCmnDlDedBoUpd(cell, ue, svc)
15565 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15566 TRC2(rgSCHCmnDlDedBoUpd);
15568 /* RACHO : if UEs idle time exceeded and a BO update
15569 * is received, then add UE to the pdcch Order Q */
15570 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
15572 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue, cell);
15573 /* If PDCCH order is already triggered and we are waiting for
15574 * RACH from UE then do not add to PdcchOdrQ. */
15575 if (ueDl->rachInfo.rapIdLnk.node == NULLP)
15577 rgSCHCmnDlAdd2PdcchOdrQ(cell, ue);
15583 /* If SPS service, invoke SPS module */
15584 if (svc->dlLcSpsCfg.isSpsEnabled)
15586 rgSCHCmnSpsDlDedBoUpd(cell, ue, svc);
15587 /* Note: Return from here, no update needed in other schedulers */
15592 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
15594 cellSch->apisEmtcDl->rgSCHDlDedBoUpd(cell, ue, svc);
15595 //printf("rgSCHEMTCDlDedBoUpd\n");
15600 cellSch->apisDl->rgSCHDlDedBoUpd(cell, ue, svc);
15605 rgSCHSCellDlDedBoUpd(cell, ue, svc);
15613 * @brief Removes an UE from Cell's TA List.
15617 * Function: rgSCHCmnRmvFrmTaLst
15618 * Purpose: Removes an UE from Cell's TA List.
15620 * Invoked by: Specific Scheduler
15622 * @param[in] RgSchCellCb* cell
15623 * @param[in] RgSchUeCb* ue
15628 PUBLIC Void rgSCHCmnRmvFrmTaLst
15634 PUBLIC Void rgSCHCmnRmvFrmTaLst(cell, ue)
15639 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
15640 TRC2(rgSCHCmnRmvFrmTaLst);
15643 if(cell->emtcEnable && ue->isEmtcUe)
15645 rgSCHEmtcRmvFrmTaLst(cellCmnDl,ue);
15650 cmLListDelFrm(&cellCmnDl->taLst, &ue->dlTaLnk);
15651 ue->dlTaLnk.node = (PTR)NULLP;
15656 /* Fix: syed Remove the msg4Proc from cell
15657 * msg4Retx Queue. I have used CMN scheduler function
15658 * directly. Please define a new API and call this
15659 * function through that. */
15662 * @brief This function removes MSG4 HARQ process from cell RETX Queues.
15666 * Function: rgSCHCmnDlMsg4ProcRmvFrmRetx
15667 * Purpose: This function removes MSG4 HARQ process from cell RETX Queues.
15669 * Invoked by: UE/RACB deletion.
15671 * @param[in] RgSchCellCb* cell
15672 * @param[in] RgSchDlHqProc* hqP
15677 PUBLIC Void rgSCHCmnDlMsg4ProcRmvFrmRetx
15680 RgSchDlHqProcCb *hqP
15683 PUBLIC Void rgSCHCmnDlMsg4ProcRmvFrmRetx(cell, hqP)
15685 RgSchDlHqProcCb *hqP;
15688 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15689 TRC2(rgSCHCmnDlMsg4ProcRmvFrmRetx);
15691 if (hqP->tbInfo[0].ccchSchdInfo.retxLnk.node)
15693 if (hqP->hqE->msg4Proc == hqP)
15695 cmLListDelFrm(&cellSch->dl.msg4RetxLst, \
15696 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15697 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
15700 else if(hqP->hqE->ccchSduProc == hqP)
15702 cmLListDelFrm(&cellSch->dl.ccchSduRetxLst,
15703 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15704 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
15713 * @brief This function adds a HARQ process for retx.
15717 * Function: rgSCHCmnDlProcAddToRetx
15718 * Purpose: This function adds a HARQ process to retransmission
15719 * queue. This may be performed when a HARQ ACK is not received for the transmission.
15722 * Invoked by: HARQ feedback processing
15724 * @param[in] RgSchCellCb* cell
15725 * @param[in] RgSchDlHqProc* hqP
15730 PUBLIC Void rgSCHCmnDlProcAddToRetx
15733 RgSchDlHqProcCb *hqP
15736 PUBLIC Void rgSCHCmnDlProcAddToRetx(cell, hqP)
15738 RgSchDlHqProcCb *hqP;
15741 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15742 TRC2(rgSCHCmnDlProcAddToRetx);
15744 if (hqP->hqE->msg4Proc == hqP) /* indicating msg4 transmission */
15746 cmLListAdd2Tail(&cellSch->dl.msg4RetxLst, \
15747 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15748 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)hqP;
15751 else if(hqP->hqE->ccchSduProc == hqP)
15753 /* If CCCH SDU is being transmitted without contention resolution CE */
15754 cmLListAdd2Tail(&cellSch->dl.ccchSduRetxLst,
15755 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15756 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)hqP;
15762 if (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP))
15764 /* Invoke SPS module for SPS HARQ proc re-transmission handling */
15765 rgSCHCmnSpsDlProcAddToRetx(cell, hqP);
15768 #endif /* LTEMAC_SPS */
15770 if((TRUE == cell->emtcEnable)
15771 && (TRUE == hqP->hqE->ue->isEmtcUe))
15773 cellSch->apisEmtcDl->rgSCHDlProcAddToRetx(cell, hqP);
15778 cellSch->apisDl->rgSCHDlProcAddToRetx(cell, hqP);
15786 * @brief This function performs RI validation and
15787 * updates it to the ueCb.
15791 * Function: rgSCHCmnDlSetUeRi
15792 * Purpose: This function performs RI validation and
15793 * updates it to the ueCb.
15795 * Invoked by: rgSCHCmnDlCqiInd
15797 * @param[in] RgSchCellCb *cell
15798 * @param[in] RgSchUeCb *ue
15800 * @param[in] Bool isPeriodic
15805 PRIVATE Void rgSCHCmnDlSetUeRi
15813 PRIVATE Void rgSCHCmnDlSetUeRi(cell, ue, ri, isPer)
15820 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
15821 RgSchCmnUeInfo *ueSchCmn = RG_SCH_CMN_GET_CMN_UE(ue);
15822 TRC2(rgSCHCmnDlSetUeRi);
15825 RgSchUePCqiCb *cqiCb = RG_SCH_GET_UE_CELL_CQI_CB(ue,cell);
15830 /* FIX for RRC Reconfiguration issue */
15831 /* ccpu00140894 - During Tx Mode transition the RI report will not be entertained for
15832 * a specific period during which SCH expects the UE to complete the TX mode transition */
15833 if (ue->txModeTransCmplt == FALSE)
15838 /* Restrict the Number of TX layers to cell->numTxAntPorts.
15839 * Protection from invalid RI values. */
15840 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
15842 /* Special case of converting PMI to sane value when
15843 * there is a switch in RI from 1 to 2 and PMI reported
15844 * for RI=1 is invalid for RI=2 */
15845 if ((cell->numTxAntPorts == 2) && (ue->mimoInfo.txMode == RGR_UE_TM_4))
15847 if ((ri == 2) && ( ueDl->mimoInfo.ri == 1))
15849 ueDl->mimoInfo.pmi = (ueDl->mimoInfo.pmi < 2)? 1:2;
15853 /* Restrict the Number of TX layers according to the UE Category */
15854 ueDl->mimoInfo.ri = RGSCH_MIN(ri, rgUeCatTbl[ueSchCmn->ueCat].maxTxLyrs);
15856 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].riCnt[ueDl->mimoInfo.ri-1]++;
15857 cell->tenbStats->sch.riCnt[ueDl->mimoInfo.ri-1]++;
15861 ue->tenbStats->stats.nonPersistent.sch[0].riCnt[ueDl->mimoInfo.ri-1]++;
15862 cell->tenbStats->sch.riCnt[ueDl->mimoInfo.ri-1]++;
15868 /* If RI is from Periodic CQI report */
15869 cqiCb->perRiVal = ueDl->mimoInfo.ri;
15870 /* Reset at every Periodic RI Reception */
15871 cqiCb->invalidateCqi = FALSE;
15875 /* If RI is from Aperiodic CQI report */
15876 if (cqiCb->perRiVal != ueDl->mimoInfo.ri)
15878 /* if this aperRI is different from last reported
15879 * perRI then invalidate all CQI reports till the next periodic RI reception
15881 cqiCb->invalidateCqi = TRUE;
15885 cqiCb->invalidateCqi = FALSE;
15890 if (ueDl->mimoInfo.ri > 1)
15892 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
15894 else if (ue->mimoInfo.txMode == RGR_UE_TM_3) /* ri == 1 */
15896 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
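   /* Illustrative worked example (hypothetical numbers): on a cell with
    * numTxAntPorts = 2, a UE whose category allows at most 2 layers and which
    * reports ri = 4 is clamped first to min(4, 2) = 2 and then to
    * min(2, maxTxLyrs = 2) = 2. If that UE is in TM4 and its previous RI was 1
    * with a stored pmi of 3, the pmi is remapped to 2 ((pmi < 2) ? 1 : 2) so
    * that it remains usable for RI = 2. */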
15904 * @brief This function performs PMI validation and
15905 * updates it to the ueCb.
15909 * Function: rgSCHCmnDlSetUePmi
15910 * Purpose: This function performs PMI validation and
15911 * updates it to the ueCb.
15913 * Invoked by: rgSCHCmnDlCqiInd
15915 * @param[in] RgSchCellCb *cell
15916 * @param[in] RgSchUeCb *ue
15917 * @param[in] U8 pmi
15922 PRIVATE S16 rgSCHCmnDlSetUePmi
15929 PRIVATE S16 rgSCHCmnDlSetUePmi(cell, ue, pmi)
15935 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
15936 TRC2(rgSCHCmnDlSetUePmi);
15938 if (ue->txModeTransCmplt == FALSE)
15943 if (cell->numTxAntPorts == 2)
15949 if (ueDl->mimoInfo.ri == 2)
15951 /*ccpu00118150 - MOD - changed pmi value validation from 0 to 2*/
15952 /* PMI 2 and 3 are invalid in case of 2 TxAnt and 2 Layered SM */
15953 if (pmi == 2 || pmi == 3)
15957 ueDl->mimoInfo.pmi = pmi+1;
15961 ueDl->mimoInfo.pmi = pmi;
15964 else if (cell->numTxAntPorts == 4)
15970 ueDl->mimoInfo.pmi = pmi;
15972 /* Reset the No PMI Flag in forceTD */
15973 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
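   /* Illustrative summary of the mapping above: on a 2-antenna-port cell with
    * RI = 2, a reported pmi of 0 or 1 is stored as pmi + 1 (i.e. 1 or 2) and a
    * reported pmi of 2 or 3 is treated as invalid; with RI = 1, and on the
    * visible 4-antenna-port path, the reported pmi is stored unchanged. */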
15978 * @brief This function Updates the DL CQI on PUCCH for the UE.
15982 * Function: rgSCHCmnDlProcCqiMode10
15984 * This function updates the DL CQI on PUCCH for the UE.
15986 * Invoked by: rgSCHCmnDlCqiOnPucchInd
15988 * Processing Steps:
15990 * @param[in] RgSchCellCb *cell
15991 * @param[in] RgSchUeCb *ue
15992 * @param[in] TfuDlCqiRpt *dlCqiRpt
15997 #ifdef RGR_CQI_REPT
15999 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10
16003 TfuDlCqiPucch *pucchCqi,
16007 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi, isCqiAvail)
16010 TfuDlCqiPucch *pucchCqi;
16015 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10
16019 TfuDlCqiPucch *pucchCqi
16022 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi)
16025 TfuDlCqiPucch *pucchCqi;
16029 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16030 TRC2(rgSCHCmnDlProcCqiMode10);
16032 if (pucchCqi->u.mode10Info.type == TFU_RPT_CQI)
16034 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16035 /* Checking whether the decoded CQI is a value between 1 and 15*/
16036 if((pucchCqi->u.mode10Info.u.cqi) && (pucchCqi->u.mode10Info.u.cqi
16037 < RG_SCH_CMN_MAX_CQI))
16039 ueDl->cqiFlag = TRUE;
16040 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode10Info.u.cqi;
16041 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16042 /* ccpu00117452 - MOD - Changed macro name from
16043 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16044 #ifdef RGR_CQI_REPT
16045 *isCqiAvail = TRUE;
16053 else if (pucchCqi->u.mode10Info.type == TFU_RPT_RI)
16055 if ( RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode10Info.u.ri) )
16057 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode10Info.u.ri,
16062 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16063 pucchCqi->u.mode10Info.u.ri,ue->ueId);
16070 * @brief This function Updates the DL CQI on PUCCH for the UE.
16074 * Function: rgSCHCmnDlProcCqiMode11
16076 * This function updates the DL CQI on PUCCH for the UE.
16078 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16080 * Processing Steps:
16081 * Process CQI MODE 11
16082 * @param[in] RgSchCellCb *cell
16083 * @param[in] RgSchUeCb *ue
16084 * @param[in] TfuDlCqiRpt *dlCqiRpt
16089 #ifdef RGR_CQI_REPT
16091 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11
16095 TfuDlCqiPucch *pucchCqi,
16097 Bool *is2ndCwCqiAvail
16100 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi, isCqiAvail, is2ndCwCqiAvail)
16103 TfuDlCqiPucch *pucchCqi;
16105 Bool *is2ndCwCqiAvail;
16109 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11
16113 TfuDlCqiPucch *pucchCqi
16116 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi)
16119 TfuDlCqiPucch *pucchCqi;
16123 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16124 TRC2(rgSCHCmnDlProcCqiMode11);
16126 if (pucchCqi->u.mode11Info.type == TFU_RPT_CQI)
16128 ue->mimoInfo.puschFdbkVld = FALSE;
16129 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16130 if((pucchCqi->u.mode11Info.u.cqi.cqi) &&
16131 (pucchCqi->u.mode11Info.u.cqi.cqi < RG_SCH_CMN_MAX_CQI))
16133 ueDl->cqiFlag = TRUE;
16134 /* ccpu00117452 - MOD - Changed macro name from
16135 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16136 #ifdef RGR_CQI_REPT
16137 *isCqiAvail = TRUE;
16139 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode11Info.u.cqi.cqi;
16140 if (pucchCqi->u.mode11Info.u.cqi.wideDiffCqi.pres)
16142 RG_SCH_UPDT_CW2_CQI(ueDl->mimoInfo.cwInfo[0].cqi, \
16143 ueDl->mimoInfo.cwInfo[1].cqi, \
16144 pucchCqi->u.mode11Info.u.cqi.wideDiffCqi.val);
16145 #ifdef RGR_CQI_REPT
16146 /* ccpu00117259 - ADD - Considering second codeword CQI info
16147 in case of MIMO for CQI Reporting */
16148 *is2ndCwCqiAvail = TRUE;
16156 rgSCHCmnDlSetUePmi(cell, ue, \
16157 pucchCqi->u.mode11Info.u.cqi.pmi);
16159 else if (pucchCqi->u.mode11Info.type == TFU_RPT_RI)
16161 if( RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode11Info.u.ri))
16163 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode11Info.u.ri,
16168 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Invalid RI value(%x) CRNTI:%d",
16169 pucchCqi->u.mode11Info.u.ri,ue->ueId);
16176 * @brief This function Updates the DL CQI on PUCCH for the UE.
16180 * Function: rgSCHCmnDlProcCqiMode20
16182 * This function updates the DL CQI on PUCCH for the UE.
16184 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16186 * Processing Steps:
16187 * Process CQI MODE 20
16188 * @param[in] RgSchCellCb *cell
16189 * @param[in] RgSchUeCb *ue
16190 * @param[in] TfuDlCqiRpt *dlCqiRpt
16195 #ifdef RGR_CQI_REPT
16197 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20
16201 TfuDlCqiPucch *pucchCqi,
16205 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi, isCqiAvail )
16208 TfuDlCqiPucch *pucchCqi;
16213 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20
16217 TfuDlCqiPucch *pucchCqi
16220 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi)
16223 TfuDlCqiPucch *pucchCqi;
16227 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16228 TRC2(rgSCHCmnDlProcCqiMode20);
16230 if (pucchCqi->u.mode20Info.type == TFU_RPT_CQI)
16232 if (pucchCqi->u.mode20Info.u.cqi.isWideband)
16234 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16235 if((pucchCqi->u.mode20Info.u.cqi.u.wideCqi) &&
16236 (pucchCqi->u.mode20Info.u.cqi.u.wideCqi < RG_SCH_CMN_MAX_CQI))
16238 ueDl->cqiFlag = TRUE;
16239 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode20Info.u.cqi.\
16241 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16242 /* ccpu00117452 - MOD - Changed macro name from
16243 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16244 #ifdef RGR_CQI_REPT
16245 *isCqiAvail = TRUE;
16254 else if (pucchCqi->u.mode20Info.type == TFU_RPT_RI)
16256 if(RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode20Info.u.ri))
16258 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode20Info.u.ri,
16263 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16264 pucchCqi->u.mode20Info.u.ri,ue->ueId);
16272 * @brief This function Updates the DL CQI on PUCCH for the UE.
16276 * Function: rgSCHCmnDlProcCqiMode21
16278 * This function updates the DL CQI on PUCCH for the UE.
16280 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16282 * Processing Steps:
16283 * Process CQI MODE 21
16284 * @param[in] RgSchCellCb *cell
16285 * @param[in] RgSchUeCb *ue
16286 * @param[in] TfuDlCqiRpt *dlCqiRpt
16291 #ifdef RGR_CQI_REPT
16293 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21
16297 TfuDlCqiPucch *pucchCqi,
16299 Bool *is2ndCwCqiAvail
16302 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi, isCqiAvail, is2ndCwCqiAvail)
16305 TfuDlCqiPucch *pucchCqi;
16306 TfuDlCqiRpt *dlCqiRpt;
16308 Bool *is2ndCwCqiAvail;
16312 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21
16316 TfuDlCqiPucch *pucchCqi
16319 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi)
16322 TfuDlCqiPucch *pucchCqi;
16326 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16327 TRC2(rgSCHCmnDlProcCqiMode21);
16329 if (pucchCqi->u.mode21Info.type == TFU_RPT_CQI)
16331 ue->mimoInfo.puschFdbkVld = FALSE;
16332 if (pucchCqi->u.mode21Info.u.cqi.isWideband)
16334 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16335 if((pucchCqi->u.mode21Info.u.cqi.u.wideCqi.cqi) &&
16336 (pucchCqi->u.mode21Info.u.cqi.u.wideCqi.cqi < RG_SCH_CMN_MAX_CQI))
16338 ueDl->cqiFlag = TRUE;
16339 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode21Info.u.cqi.\
16341 if (pucchCqi->u.mode21Info.u.cqi.u.wideCqi.diffCqi.pres)
16343 RG_SCH_UPDT_CW2_CQI(ueDl->mimoInfo.cwInfo[0].cqi, \
16344 ueDl->mimoInfo.cwInfo[1].cqi, \
16345 pucchCqi->u.mode21Info.u.cqi.u.wideCqi.diffCqi.val);
16346 #ifdef RGR_CQI_REPT
16347 /* ccpu00117259 - ADD - Considering second codeword CQI info
16348 in case of MIMO for CQI Reporting */
16349 *is2ndCwCqiAvail = TRUE;
16352 /* ccpu00117452 - MOD - Changed macro name from
16353 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16354 #ifdef RGR_CQI_REPT
16355 *isCqiAvail = TRUE;
16362 rgSCHCmnDlSetUePmi(cell, ue, \
16363 pucchCqi->u.mode21Info.u.cqi.u.wideCqi.pmi);
16366 else if (pucchCqi->u.mode21Info.type == TFU_RPT_RI)
16368 if(RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode21Info.u.ri))
16370 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode21Info.u.ri,
16375 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Invalid RI value(%x) CRNTI:%d",
16376 pucchCqi->u.mode21Info.u.ri,ue->ueId);
16384 * @brief This function Updates the DL CQI on PUCCH for the UE.
16388 * Function: rgSCHCmnDlCqiOnPucchInd
16390 * This function updates the DL CQI on PUCCH for the UE.
16392 * Invoked by: rgSCHCmnDlCqiInd
16394 * Processing Steps:
16395 * - Depending on the reporting mode of the PUCCH, the CQI/PMI/RI values
16396 * are updated and stored for each UE
16398 * @param[in] RgSchCellCb *cell
16399 * @param[in] RgSchUeCb *ue
16400 * @param[in] TfuDlCqiRpt *dlCqiRpt
16405 #ifdef RGR_CQI_REPT
16407 PRIVATE Void rgSCHCmnDlCqiOnPucchInd
16411 TfuDlCqiPucch *pucchCqi,
16412 RgrUeCqiRept *ueCqiRept,
16414 Bool *is2ndCwCqiAvail
16417 PRIVATE Void rgSCHCmnDlCqiOnPucchInd(cell, ue, pucchCqi, ueCqiRept, isCqiAvail, is2ndCwCqiAvail)
16420 TfuDlCqiPucch *pucchCqi;
16421 RgrUeCqiRept *ueCqiRept;
16423 Bool *is2ndCwCqiAvail;
16427 PRIVATE Void rgSCHCmnDlCqiOnPucchInd
16431 TfuDlCqiPucch *pucchCqi
16434 PRIVATE Void rgSCHCmnDlCqiOnPucchInd(cell, ue, pucchCqi)
16437 TfuDlCqiPucch *pucchCqi;
16441 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16442 TRC2(rgSCHCmnDlCqiOnPucchInd);
16444 /* ccpu00117452 - MOD - Changed
16445 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16446 #ifdef RGR_CQI_REPT
16447 /* Save CQI mode information in the report */
16448 ueCqiRept->cqiMode = pucchCqi->mode;
16451 switch(pucchCqi->mode)
16453 case TFU_PUCCH_CQI_MODE10:
16454 #ifdef RGR_CQI_REPT
16455 rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi, isCqiAvail);
16457 rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi);
16459 ueDl->cqiFlag = TRUE;
16461 case TFU_PUCCH_CQI_MODE11:
16462 #ifdef RGR_CQI_REPT
16463 rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi, isCqiAvail,
16466 rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi);
16468 ueDl->cqiFlag = TRUE;
16470 case TFU_PUCCH_CQI_MODE20:
16471 #ifdef RGR_CQI_REPT
16472 rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi, isCqiAvail);
16474 rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi);
16476 ueDl->cqiFlag = TRUE;
16478 case TFU_PUCCH_CQI_MODE21:
16479 #ifdef RGR_CQI_REPT
16480 rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi, isCqiAvail,
16483 rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi);
16485 ueDl->cqiFlag = TRUE;
16489 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Unknown CQI Mode %d CRNTI:%d",
16490 pucchCqi->mode,ue->ueId);
16491 /* ccpu00117452 - MOD - Changed macro name from
16492 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16493 #ifdef RGR_CQI_REPT
16494 *isCqiAvail = FALSE;
16501 } /* rgSCHCmnDlCqiOnPucchInd */
16505 * @brief This function Updates the DL CQI on PUSCH for the UE.
16509 * Function: rgSCHCmnDlCqiOnPuschInd
16511 * This function updates the DL CQI on PUSCH for the UE.
16513 * Invoked by: rgSCHCmnDlCqiInd
16515 * Processing Steps:
16516 * - Depending on the reporting mode of the PUSCH, the CQI/PMI/RI values
16517 * are updated and stored for each UE
16519 * @param[in] RgSchCellCb *cell
16520 * @param[in] RgSchUeCb *ue
16521 * @param[in] TfuDlCqiRpt *dlCqiRpt
16526 #ifdef RGR_CQI_REPT
16528 PRIVATE Void rgSCHCmnDlCqiOnPuschInd
16532 TfuDlCqiPusch *puschCqi,
16533 RgrUeCqiRept *ueCqiRept,
16535 Bool *is2ndCwCqiAvail
16538 PRIVATE Void rgSCHCmnDlCqiOnPuschInd(cell, ue, puschCqi, ueCqiRept, isCqiAvail, is2ndCwCqiAvail)
16541 TfuDlCqiPusch *puschCqi;
16542 RgrUeCqiRept *ueCqiRept;
16544 Bool *is2ndCwCqiAvail;
16548 PRIVATE Void rgSCHCmnDlCqiOnPuschInd
16552 TfuDlCqiPusch *puschCqi
16555 PRIVATE Void rgSCHCmnDlCqiOnPuschInd(cell, ue, puschCqi)
16558 TfuDlCqiPusch *puschCqi;
16562 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16564 TRC2(rgSCHCmnDlCqiOnPuschInd);
16565 if (puschCqi->ri.pres == PRSNT_NODEF)
16567 if (RG_SCH_CMN_IS_RI_VALID(puschCqi->ri.val))
16569 /* Saving the previous ri value to revert back
16570 in case PMI update failed */
16571 if (RGR_UE_TM_4 == ue->mimoInfo.txMode ) /* Checking for TM4. TM8 check later */
16573 prevRiVal = ueDl->mimoInfo.ri;
16575 rgSCHCmnDlSetUeRi(cell, ue, puschCqi->ri.val, FALSE);
16579 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16580 puschCqi->ri.val,ue->ueId);
16584 ue->mimoInfo.puschFdbkVld = FALSE;
16585 /* ccpu00117452 - MOD - Changed macro name from
16586 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16587 #ifdef RGR_CQI_REPT
16588 /* Save CQI mode information in the report */
16589 ueCqiRept->cqiMode = puschCqi->mode;
16590 /* ccpu00117259 - DEL - removed default setting of isCqiAvail to TRUE */
16593 switch(puschCqi->mode)
16595 case TFU_PUSCH_CQI_MODE_20:
16596 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16597 /* Checking whether the decoded CQI is a value between 1 and 15*/
16598 if((puschCqi->u.mode20Info.wideBandCqi) &&
16599 (puschCqi->u.mode20Info.wideBandCqi < RG_SCH_CMN_MAX_CQI))
16601 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode20Info.wideBandCqi;
16602 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16603 /* ccpu00117452 - MOD - Changed macro name from
16604 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16605 #ifdef RGR_CQI_REPT
16606 *isCqiAvail = TRUE;
16614 case TFU_PUSCH_CQI_MODE_30:
16615 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16616 if((puschCqi->u.mode30Info.wideBandCqi) &&
16617 (puschCqi->u.mode30Info.wideBandCqi < RG_SCH_CMN_MAX_CQI))
16619 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode30Info.wideBandCqi;
16620 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16621 /* ccpu00117452 - MOD - Changed macro name from
16622 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16623 #ifdef RGR_CQI_REPT
16624 *isCqiAvail = TRUE;
16628 extern U32 gACqiRcvdCount;
16639 case TFU_PUSCH_CQI_MODE_12:
16640 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16641 if((puschCqi->u.mode12Info.cqiIdx[0]) &&
16642 (puschCqi->u.mode12Info.cqiIdx[0] < RG_SCH_CMN_MAX_CQI))
16644 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode12Info.cqiIdx[0];
16645 /* ccpu00117452 - MOD - Changed macro name from
16646 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16647 #ifdef RGR_CQI_REPT
16648 *isCqiAvail = TRUE;
16655 if((puschCqi->u.mode12Info.cqiIdx[1]) &&
16656 (puschCqi->u.mode12Info.cqiIdx[1] < RG_SCH_CMN_MAX_CQI))
16658 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode12Info.cqiIdx[1];
16659 /* ccpu00117452 - MOD - Changed macro name from
16660 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16661 #ifdef RGR_CQI_REPT
16662 /* ccpu00117259 - ADD - Considering second codeword CQI info
16663 in case of MIMO for CQI Reporting */
16664 *is2ndCwCqiAvail = TRUE;
16671 ue->mimoInfo.puschFdbkVld = TRUE;
16672 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_12;
16673 ue->mimoInfo.puschPmiInfo.u.mode12Info = puschCqi->u.mode12Info;
16674 /* Resetting this is time based. Make use of the CQI reporting
16675 * periodicity and DELTAs in determining the exact time at which this
16676 * needs to be reset. */
16678 case TFU_PUSCH_CQI_MODE_22:
16679 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16680 if((puschCqi->u.mode22Info.wideBandCqi[0]) &&
16681 (puschCqi->u.mode22Info.wideBandCqi[0] < RG_SCH_CMN_MAX_CQI))
16683 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode22Info.wideBandCqi[0];
16684 /* ccpu00117452 - MOD - Changed macro name from
16685 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16686 #ifdef RGR_CQI_REPT
16687 *isCqiAvail = TRUE;
16694 if((puschCqi->u.mode22Info.wideBandCqi[1]) &&
16695 (puschCqi->u.mode22Info.wideBandCqi[1] < RG_SCH_CMN_MAX_CQI))
16697 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode22Info.wideBandCqi[1];
16698 /* ccpu00117452 - MOD - Changed macro name from
16699 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16700 #ifdef RGR_CQI_REPT
16701 /* ccpu00117259 - ADD - Considering second codeword CQI info
16702 in case of MIMO for CQI Reporting */
16703 *is2ndCwCqiAvail = TRUE;
16710 rgSCHCmnDlSetUePmi(cell, ue, puschCqi->u.mode22Info.wideBandPmi);
16711 ue->mimoInfo.puschFdbkVld = TRUE;
16712 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_22;
16713 ue->mimoInfo.puschPmiInfo.u.mode22Info = puschCqi->u.mode22Info;
16715 case TFU_PUSCH_CQI_MODE_31:
16716 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16717 if((puschCqi->u.mode31Info.wideBandCqi[0]) &&
16718 (puschCqi->u.mode31Info.wideBandCqi[0] < RG_SCH_CMN_MAX_CQI))
16720 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode31Info.wideBandCqi[0];
16721 /* ccpu00117452 - MOD - Changed macro name from
16722 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16723 #ifdef RGR_CQI_REPT
16724 *isCqiAvail = TRUE;
16727 if (ueDl->mimoInfo.ri > 1)
16729 if((puschCqi->u.mode31Info.wideBandCqi[1]) &&
16730 (puschCqi->u.mode31Info.wideBandCqi[1] < RG_SCH_CMN_MAX_CQI))
16732 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode31Info.wideBandCqi[1];
16733 /* ccpu00117452 - MOD - Changed macro name from
16734 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16735 #ifdef RGR_CQI_REPT
16736 /* ccpu00117259 - ADD - Considering second codeword CQI info
16737 in case of MIMO for CQI Reporting */
16738 *is2ndCwCqiAvail = TRUE;
16742 if (rgSCHCmnDlSetUePmi(cell, ue, puschCqi->u.mode31Info.pmi) != ROK)
16744 /* To avoid Rank and PMI inconsistency */
16745 if ((puschCqi->ri.pres == PRSNT_NODEF) &&
16746 (RGR_UE_TM_4 == ue->mimoInfo.txMode)) /* checking for TM4. TM8 check later */
16748 ueDl->mimoInfo.ri = prevRiVal;
16751 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_31;
16752 ue->mimoInfo.puschPmiInfo.u.mode31Info = puschCqi->u.mode31Info;
16756 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Unknown CQI Mode %d CRNTI:%d",
16757 puschCqi->mode,ue->ueId);
16758 /* CQI decoding failed; revert the RI to the previous value */
16759 if ((puschCqi->ri.pres == PRSNT_NODEF) &&
16760 (RGR_UE_TM_4 == ue->mimoInfo.txMode)) /* checking for TM4. TM8 check later */
16762 ueDl->mimoInfo.ri = prevRiVal;
16764 /* ccpu00117452 - MOD - Changed macro name from
16765 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16766 #ifdef RGR_CQI_REPT
16767 *isCqiAvail = FALSE;
16768 /* ccpu00117259 - ADD - Considering second codeword CQI info
16769 in case of MIMO for CQI Reporting */
16770 *is2ndCwCqiAvail = FALSE;
16777 } /* rgSCHCmnDlCqiOnPuschInd */
16781 * @brief This function Updates the DL CQI for the UE.
16785 * Function: rgSCHCmnDlCqiInd
16786 * Purpose: Updates the DL CQI for the UE
16790 * @param[in] RgSchCellCb *cell
16791 * @param[in] RgSchUeCb *ue
16792 * @param[in] TfuDlCqiRpt *dlCqi
16797 PUBLIC Void rgSCHCmnDlCqiInd
16803 CmLteTimingInfo timingInfo
16806 PUBLIC Void rgSCHCmnDlCqiInd(cell, ue, isPucchInfo, dlCqi, timingInfo)
16811 CmLteTimingInfo timingInfo;
16814 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
16815 /* ccpu00117452 - MOD - Changed macro name from
16816 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16817 #ifdef RGR_CQI_REPT
16818 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16819 RgrUeCqiRept ueCqiRept = {{0}};
16820 Bool isCqiAvail = FALSE;
16821 /* ccpu00117259 - ADD - Considering second codeword CQI info
16822 in case of MIMO for CQI Reporting */
16823 Bool is2ndCwCqiAvail = FALSE;
16826 TRC2(rgSCHCmnDlCqiInd);
16828 #ifdef RGR_CQI_REPT
16831 rgSCHCmnDlCqiOnPucchInd(cell, ue, (TfuDlCqiPucch *)dlCqi, &ueCqiRept, &isCqiAvail, &is2ndCwCqiAvail);
16835 rgSCHCmnDlCqiOnPuschInd(cell, ue, (TfuDlCqiPusch *)dlCqi, &ueCqiRept, &isCqiAvail, &is2ndCwCqiAvail);
16840 rgSCHCmnDlCqiOnPucchInd(cell, ue, (TfuDlCqiPucch *)dlCqi);
16844 rgSCHCmnDlCqiOnPuschInd(cell, ue, (TfuDlCqiPusch *)dlCqi);
16848 #ifdef CQI_CONFBITMASK_DROP
16849 if(!ue->cqiConfBitMask)
16851 if (ueDl->mimoInfo.cwInfo[0].cqi >15)
16853 ueDl->mimoInfo.cwInfo[0].cqi = ue->prevCqi;
16854 ueDl->mimoInfo.cwInfo[1].cqi = ue->prevCqi;
16856 else if ( ueDl->mimoInfo.cwInfo[0].cqi >= ue->prevCqi)
16858 ue->prevCqi = ueDl->mimoInfo.cwInfo[0].cqi;
16862 U8 dlCqiDeltaPrev = 0;
16863 dlCqiDeltaPrev = ue->prevCqi - ueDl->mimoInfo.cwInfo[0].cqi;
16864 if (dlCqiDeltaPrev > 3)
16865 dlCqiDeltaPrev = 3;
16866 if ((ue->prevCqi - dlCqiDeltaPrev) < 6)
16872 ue->prevCqi = ue->prevCqi - dlCqiDeltaPrev;
16874 ueDl->mimoInfo.cwInfo[0].cqi = ue->prevCqi;
16875 ueDl->mimoInfo.cwInfo[1].cqi = ue->prevCqi;
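      /* Worked example (illustrative): with prevCqi = 12 and a reported CQI of 7,
       * dlCqiDeltaPrev = 5 is capped to 3; since 12 - 3 = 9 is not below 6, prevCqi
       * becomes 9 and both codewords are overridden with CQI 9. */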
16881 /* ccpu00117452 - MOD - Changed macro name from
16882 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16883 #ifdef RGR_CQI_REPT
16884 /* ccpu00117259 - ADD - Considering second codeword CQI info
16885 in case of MIMO for CQI Reporting - added is2ndCwCqiAvail\
16886 in 'if' condition*/
16887 if (RG_SCH_CQIR_IS_PUSHNCQI_ENBLE(ue) && (isCqiAvail || is2ndCwCqiAvail))
16889 ueCqiRept.cqi[0] = ueDl->mimoInfo.cwInfo[0].cqi;
16891 /* ccpu00117259 - ADD - Considering second codeword CQI info
16892 in case of MIMO for CQI Reporting - added is2ndCwCqiAvail
16893 in 'if' condition*/
16894 ueCqiRept.cqi[1] = 0;
16895 if(is2ndCwCqiAvail)
16897 ueCqiRept.cqi[1] = ueDl->mimoInfo.cwInfo[1].cqi;
16899 rgSCHCmnUeDlPwrCtColltCqiRept(cell, ue, &ueCqiRept);
16904 rgSCHCmnDlSetUeAllocLmtLa(cell, ue);
16905 rgSCHCheckAndSetTxScheme(cell, ue);
16908 rgSCHCmnDlSetUeAllocLmt(cell, RG_SCH_CMN_GET_DL_UE(ue,cell), ue->isEmtcUe);
16910 rgSCHCmnDlSetUeAllocLmt(cell, RG_SCH_CMN_GET_DL_UE(ue,cell), FALSE);
16914 if (cellSch->dl.isDlFreqSel)
16916 cellSch->apisDlfs->rgSCHDlfsDlCqiInd(cell, ue, isPucchInfo, dlCqi, timingInfo);
16919 /* Call SPS module to update CQI indication */
16920 rgSCHCmnSpsDlCqiIndHndlr(cell, ue, timingInfo);
16922 /* Call Specific scheduler to process on dlCqiInd */
16924 if((TRUE == cell->emtcEnable) && (TRUE == ue->isEmtcUe))
16926 cellSch->apisEmtcDl->rgSCHDlCqiInd(cell, ue, isPucchInfo, dlCqi);
16931 cellSch->apisDl->rgSCHDlCqiInd(cell, ue, isPucchInfo, dlCqi);
16934 #ifdef RG_PFS_STATS
16935 ue->pfsStats.cqiStats[(RG_SCH_GET_SCELL_INDEX(ue, cell))].avgCqi +=
16936 ueDl->mimoInfo.cwInfo[0].cqi;
16937 ue->pfsStats.cqiStats[(RG_SCH_GET_SCELL_INDEX(ue, cell))].totalCqiOcc++;
16941 ueDl->avgCqi += ueDl->mimoInfo.cwInfo[0].cqi;
16942 ueDl->numCqiOccns++;
16943 if (ueDl->mimoInfo.ri == 1)
16954 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlSumCw0Cqi += ueDl->mimoInfo.cwInfo[0].cqi;
16955 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlSumCw1Cqi += ueDl->mimoInfo.cwInfo[1].cqi;
16956 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlNumCw0Cqi ++;
16957 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlNumCw1Cqi ++;
16958 cell->tenbStats->sch.dlSumCw0Cqi += ueDl->mimoInfo.cwInfo[0].cqi;
16959 cell->tenbStats->sch.dlSumCw1Cqi += ueDl->mimoInfo.cwInfo[1].cqi;
16960 cell->tenbStats->sch.dlNumCw0Cqi ++;
16961 cell->tenbStats->sch.dlNumCw1Cqi ++;
16968 * @brief This function calculates the wideband CQI from SNR
16969 * reported for each RB.
16973 * Function: rgSCHCmnCalcWcqiFrmSnr
16974 * Purpose: Wideband CQI calculation from SNR
16976 * Invoked by: RG SCH
16978 * @param[in] RgSchCellCb *cell
16979 * @param[in] TfuSrsRpt *srsRpt,
16980 * @return Wideband CQI
16984 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr
16990 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr(cell,srsRpt)
16995 U8 wideCqi=1; /*Calculated value from SNR*/
16996 TRC2(rgSCHCmnCalcWcqiFrmSnr);
16997 /* Need to map a given SNR to a WideCQI value.
16998 * The CQI calculation is still primitive. Further, we need to
16999 * use an improved method for calculating WideCQI from SNR*/
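   /* Illustrative summary (assumption based on the bucket boundaries below): the
    * reported SNR is quantised in steps of 50 (0-50, 51-100, 101-150, ...), and
    * each higher bucket selects a higher wideband CQI in the branches below. */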
17000 if (srsRpt->snr[0] <=50)
17004 else if (srsRpt->snr[0]>=51 && srsRpt->snr[0] <=100)
17008 else if (srsRpt->snr[0]>=101 && srsRpt->snr[0] <=150)
17012 else if (srsRpt->snr[0]>=151 && srsRpt->snr[0] <=200)
17016 else if (srsRpt->snr[0]>=201 && srsRpt->snr[0] <=250)
17025 }/*rgSCHCmnCalcWcqiFrmSnr*/
17029 * @brief This function Updates the SRS for the UE.
17033 * Function: rgSCHCmnSrsInd
17034 * Purpose: Updates the UL SRS for the UE
17038 * @param[in] RgSchCellCb *cell
17039 * @param[in] RgSchUeCb *ue
17040 * @param[in] TfuSrsRpt *srsRpt,
17045 PUBLIC Void rgSCHCmnSrsInd
17050 CmLteTimingInfo timingInfo
17053 PUBLIC Void rgSCHCmnSrsInd(cell, ue, srsRpt, timingInfo)
17057 CmLteTimingInfo timingInfo;
17060 U8 wideCqi; /*Calculated value from SNR*/
17061 U32 recReqTime; /*Received Time in TTI*/
17062 TRC2(rgSCHCmnSrsInd);
17064 recReqTime = (timingInfo.sfn * RGSCH_NUM_SUB_FRAMES_5G) + timingInfo.slot;
17065 ue->srsCb.selectedAnt = (recReqTime/ue->srsCb.peri)%2;
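   /* Illustrative example: with ue->srsCb.peri = 10, recReqTime 0..9 selects
    * antenna 0, 10..19 selects antenna 1, 20..29 selects antenna 0 again, i.e.
    * the selected antenna alternates every srsCb.peri TTIs. */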
17066 if(srsRpt->wideCqiPres)
17068 wideCqi = srsRpt->wideCqi;
17072 wideCqi = rgSCHCmnCalcWcqiFrmSnr(cell, srsRpt);
17074 rgSCHCmnFindUlCqiUlTxAnt(cell, ue, wideCqi);
17076 }/*rgSCHCmnSrsInd*/
17081 * @brief This function is a handler for TA report for an UE.
17085 * Function: rgSCHCmnDlTARpt
17086 * Purpose: Determine, based on the UE_IDLE_TIME threshold,
17087 * whether the UE needs to be linked to the scheduler's TA list OR
17088 * whether it needs a PDCCH Order.
17093 * @param[in] RgSchCellCb *cell
17094 * @param[in] RgSchUeCb *ue
17099 PUBLIC Void rgSCHCmnDlTARpt
17105 PUBLIC Void rgSCHCmnDlTARpt(cell, ue)
17110 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17111 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
17112 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
17113 CmLListCp poInactvLst;
17115 TRC2(rgSCHCmnDlTARpt);
17117 /* RACHO: If UE idle time is more than threshold, then
17118 * set its poInactv pdcch order inactivity */
17119 /* Fix : syed Ignore if TaTmr is not configured */
17120 if ((ue->dl.taCb.cfgTaTmr) && (rgSCHCmnUeIdleExdThrsld(cell, ue) == ROK))
17122 U32 prevDlMsk = ue->dl.dlInactvMask;
17123 U32 prevUlMsk = ue->ul.ulInactvMask;
17124 ue->dl.dlInactvMask |= RG_PDCCHODR_INACTIVE;
17125 ue->ul.ulInactvMask |= RG_PDCCHODR_INACTIVE;
17126 /* Indicate Specific scheduler for this UEs inactivity */
17127 cmLListInit(&poInactvLst);
17128 cmLListAdd2Tail(&poInactvLst, &ueDl->rachInfo.inActUeLnk);
17129 ueDl->rachInfo.inActUeLnk.node = (PTR)ue;
17130 /* Send inactivate ind only if not already sent */
17131 if (prevDlMsk == 0)
17133 cellSch->apisDl->rgSCHDlInactvtUes(cell, &poInactvLst);
17135 if (prevUlMsk == 0)
17137 cellSch->apisUl->rgSCHUlInactvtUes(cell, &poInactvLst);
17142 /* Fix: ccpu00124009 Fix for loop in the linked list "cellDl->taLst" */
17143 if (!ue->dlTaLnk.node)
17146 if(cell->emtcEnable)
17150 rgSCHEmtcAddToTaLst(cellDl,ue);
17157 cmLListAdd2Tail(&cellDl->taLst, &ue->dlTaLnk);
17158 ue->dlTaLnk.node = (PTR)ue;
17163 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
17164 "<TA>TA duplicate entry attempt failed: UEID:%u",
17173 * @brief Indication of UL CQI.
17177 * Function : rgSCHCmnFindUlCqiUlTxAnt
17179 * - Finds the Best Tx Antenna amongst the CQIs received
17180 * from Two Tx Antennas.
17182 * @param[in] RgSchCellCb *cell
17183 * @param[in] RgSchUeCb *ue
17184 * @param[in] U8 wideCqi
17188 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt
17195 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt(cell, ue, wideCqi)
17201 ue->validTxAnt = 1;
17203 } /* rgSCHCmnFindUlCqiUlTxAnt */
17207 * @brief Indication of UL CQI.
17211 * Function : rgSCHCmnUlCqiInd
17213 * - Updates uplink CQI information for the UE. Computes and
17214 * stores the lowest CQI of CQIs reported in all subbands.
17216 * @param[in] RgSchCellCb *cell
17217 * @param[in] RgSchUeCb *ue
17218 * @param[in] TfuUlCqiRpt *ulCqiInfo
17222 PUBLIC Void rgSCHCmnUlCqiInd
17226 TfuUlCqiRpt *ulCqiInfo
17229 PUBLIC Void rgSCHCmnUlCqiInd(cell, ue, ulCqiInfo)
17232 TfuUlCqiRpt *ulCqiInfo;
17235 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
17236 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17241 #if (defined(SCH_STATS) || defined(TENB_STATS))
17242 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
17245 TRC2(rgSCHCmnUlCqiInd);
17246 /* consider inputs from SRS handlers about SRS occasions
17247 * in determining the UL TX Antenna selection */
17248 ueUl->crntUlCqi[0] = ulCqiInfo->wideCqi;
17250 ueUl->validUlCqi = ueUl->crntUlCqi[0];
17251 ue->validTxAnt = 0;
17253 iTbsNew = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][ueUl->validUlCqi];
17254 previTbs = (ueUl->ulLaCb.cqiBasediTbs + ueUl->ulLaCb.deltaiTbs)/100;
17256 if (RG_ITBS_DIFF(iTbsNew, previTbs) > 5)
17258 /* Ignore this iTBS report and mark that last iTBS report was */
17259 /* ignored so that subsequently we reset the LA algorithm */
17260 ueUl->ulLaCb.lastiTbsIgnored = TRUE;
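      /* Illustrative example: if previTbs works out to 10 and the CQI report maps
       * to iTbsNew = 16, the difference (6) exceeds 5, so this report is treated
       * as an outlier: it is only flagged here and does not feed the averaging
       * below. */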
17264 if (ueUl->ulLaCb.lastiTbsIgnored != TRUE)
17266 ueUl->ulLaCb.cqiBasediTbs = ((20 * iTbsNew * 100) +
17267 (80 * ueUl->ulLaCb.cqiBasediTbs))/100;
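      /* Worked example (illustrative): cqiBasediTbs is kept scaled by 100 (note
       * the /100 when deriving previTbs above). With cqiBasediTbs = 1500 (iTbs
       * 15.00) and iTbsNew = 17, the update gives (20*17*100 + 80*1500)/100 = 1540,
       * i.e. iTbs 15.40, a 20/80 exponential moving average toward the new report. */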
17271 /* Reset the LA as iTbs in use caught up with the value */
17272 /* reported by UE. */
17273 ueUl->ulLaCb.cqiBasediTbs = ((20 * iTbsNew * 100) +
17274 (80 * previTbs * 100))/100;
17275 ueUl->ulLaCb.deltaiTbs = 0;
17276 ueUl->ulLaCb.lastiTbsIgnored = FALSE;
17281 rgSCHPwrUlCqiInd(cell, ue);
17283 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
17285 rgSCHCmnSpsUlCqiInd(cell, ue);
17288 /* Applicable to only some schedulers */
17290 if((TRUE == cell->emtcEnable) && (TRUE == ue->isEmtcUe))
17292 cellSch->apisEmtcUl->rgSCHUlCqiInd(cell, ue, ulCqiInfo);
17297 cellSch->apisUl->rgSCHUlCqiInd(cell, ue, ulCqiInfo);
17301 ueUl->numCqiOccns++;
17302 ueUl->avgCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17307 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].ulSumCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17308 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].ulNumCqi ++;
17309 cell->tenbStats->sch.ulSumCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17310 cell->tenbStats->sch.ulNumCqi ++;
17315 } /* rgSCHCmnUlCqiInd */
17318 * @brief Returns HARQ proc for which data expected now.
17322 * Function: rgSCHCmnUlHqProcForUe
17323 * Purpose: This function returns the harq process for
17324 * which data is expected in the current subframe.
17325 * It does not validate that the HARQ process
17326 * has an allocation.
17330 * @param[in] RgSchCellCb *cell
17331 * @param[in] CmLteTimingInfo frm
17332 * @param[in] RgSchUeCb *ue
17333 * @param[out] RgSchUlHqProcCb **procRef
17337 PUBLIC Void rgSCHCmnUlHqProcForUe
17340 CmLteTimingInfo frm,
17342 RgSchUlHqProcCb **procRef
17345 PUBLIC Void rgSCHCmnUlHqProcForUe(cell, frm, ue, procRef)
17347 CmLteTimingInfo frm;
17349 RgSchUlHqProcCb **procRef;
17353 U8 procId = rgSCHCmnGetUlHqProcIdx(&frm, cell);
17355 TRC2(rgSCHCmnUlHqProcForUe);
17357 *procRef = rgSCHUhmGetUlHqProc(cell, ue, procId);
17359 *procRef = rgSCHUhmGetUlProcByTime(cell, ue, frm);
17366 * @brief Update harq process for allocation.
17370 * Function : rgSCHCmnUpdUlHqProc
17372 * This function is invoked when the harq process
17373 * control block has moved to a new memory location,
17374 * thus requiring a pointer/reference update.
17376 * @param[in] RgSchCellCb *cell
17377 * @param[in] RgSchUlHqProcCb *curProc
17378 * @param[in] RgSchUlHqProcCb *oldProc
17384 PUBLIC S16 rgSCHCmnUpdUlHqProc
17387 RgSchUlHqProcCb *curProc,
17388 RgSchUlHqProcCb *oldProc
17391 PUBLIC S16 rgSCHCmnUpdUlHqProc(cell, curProc, oldProc)
17393 RgSchUlHqProcCb *curProc;
17394 RgSchUlHqProcCb *oldProc;
17397 TRC2(rgSCHCmnUpdUlHqProc);
17401 #if (ERRCLASS & ERRCLS_DEBUG)
17402 if (curProc->alloc == NULLP)
17407 curProc->alloc->hqProc = curProc;
17409 } /* rgSCHCmnUpdUlHqProc */
17412 /*MS_WORKAROUND for CR FIXME */
17414 * @brief Handles BSR timer expiry
17418 * Function : rgSCHCmnBsrTmrExpry
17420 * This function is invoked when periodic BSR timer expires for a UE.
17422 * @param[in] RgSchUeCb *ue
17428 PUBLIC S16 rgSCHCmnBsrTmrExpry
17433 PUBLIC S16 rgSCHCmnBsrTmrExpry(ueCb)
17437 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ueCb->cell);
17439 TRC2(rgSCHCmnBsrTmrExpry)
17441 ueCb->isSrGrant = TRUE;
17444 emtcStatsUlBsrTmrTxp++;
17448 if(ueCb->cell->emtcEnable)
17452 cellSch->apisEmtcUl->rgSCHSrRcvd(ueCb->cell, ueCb);
17459 cellSch->apisUl->rgSCHSrRcvd(ueCb->cell, ueCb);
17466 * @brief Short BSR update.
17470 * Function : rgSCHCmnUpdBsrShort
17472 * This function does the requisite updates to handle short BSR reporting.
17474 * @param[in] RgSchCellCb *cell
17475 * @param[in] RgSchUeCb *ue
17476 * @param[in] RgSchLcgCb *ulLcg
17477 * @param[in] U8 bsr
17478 * @param[out] RgSchErrInfo *err
17484 PUBLIC S16 rgSCHCmnUpdBsrShort
17493 PUBLIC S16 rgSCHCmnUpdBsrShort(cell, ue, ulLcg, bsr, err)
17503 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
17505 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17506 RgSchCmnLcg *cmnLcg = NULLP;
17511 TRC2(rgSCHCmnUpdBsrShort);
17513 if (!RGSCH_LCG_ISCFGD(ulLcg))
17515 err->errCause = RGSCHERR_SCH_LCG_NOT_CFGD;
17518 for (lcgCnt=0; lcgCnt<4; lcgCnt++)
17521 /* Set BS of all other LCGs to Zero.
17522 If Zero BSR is reported in Short BSR include this LCG too */
17523 if ((lcgCnt != ulLcg->lcgId) ||
17524 (!bsr && !ueUl->hqEnt.numBusyHqProcs))
17526 /* If old BO is zero do nothing */
17527 if(((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->bs != 0)
17529 for(idx = 0; idx < ue->ul.lcgArr[lcgCnt].numLch; idx++)
17531 if((ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->ulUeCount) &&
17532 (ue->ulActiveLCs & (1 <<
17533 (ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->qci -1))))
17536 ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->ulUeCount--;
17537 ue->ulActiveLCs &= ~(1 <<
17538 (ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->qci -1));
17544 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgCnt]))
17546 ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->bs = 0;
17547 ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->reportedBs = 0;
17552 if(ulLcg->lcgId && bsr && (((RgSchCmnLcg *)(ulLcg->sch))->bs == 0))
17554 for(idx = 0; idx < ulLcg->numLch; idx++)
17557 if (!(ue->ulActiveLCs & (1 << (ulLcg->lcArray[idx]->qciCb->qci -1))))
17559 ulLcg->lcArray[idx]->qciCb->ulUeCount++;
17560 ue->ulActiveLCs |= (1 << (ulLcg->lcArray[idx]->qciCb->qci -1));
17565 /* Resetting the nonGbrLcgBs info here */
17566 ue->ul.nonGbrLcgBs = 0;
17567 ue->ul.nonLcg0Bs = 0;
17569 cmnLcg = ((RgSchCmnLcg *)(ulLcg->sch));
17571 if (TRUE == ue->ul.useExtBSRSizes)
17573 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsr];
17577 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsr];
17579 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17581 /* TBD check for effGbr != 0 */
17582 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
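      /* Illustrative example: if the reported BSR index maps to 4000 bytes but
       * this GBR LCG's effGbr + effDeltaMbr is only 2000 bytes, bs is capped at
       * 2000 bytes here. */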
17584 else if (0 == ulLcg->lcgId)
17586 /* This is added for handling LCG0 */
17587 cmnLcg->bs = cmnLcg->reportedBs;
17591 /* Update non GBR LCG's BS*/
17592 ue->ul.nonGbrLcgBs = RGSCH_MIN(cmnLcg->reportedBs,ue->ul.effAmbr);
17593 cmnLcg->bs = ue->ul.nonGbrLcgBs;
17595 ue->ul.totalBsr = cmnLcg->bs;
17598 if ((ue->bsrTmr.tmrEvnt != TMR_NONE) && (bsr == 0))
17600 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
17604 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
17606 rgSCHCmnSpsBsrRpt(cell, ue, ulLcg);
17609 rgSCHCmnUpdUlCompEffBsr(ue);
17612 if(cell->emtcEnable)
17616 cellSch->apisEmtcUl->rgSCHUpdBsrShort(cell, ue, ulLcg, bsr);
17623 cellSch->apisUl->rgSCHUpdBsrShort(cell, ue, ulLcg, bsr);
17627 if (ue->ul.isUlCaEnabled && ue->numSCells)
17629 for(U8 sCellIdx = 1; sCellIdx <= RG_SCH_MAX_SCELL ; sCellIdx++)
17631 #ifndef PAL_ENABLE_UL_CA
17632 if((ue->cellInfo[sCellIdx] != NULLP) &&
17633 (ue->cellInfo[sCellIdx]->sCellState == RG_SCH_SCELL_ACTIVE))
17635 if(ue->cellInfo[sCellIdx] != NULLP)
17638 cellSch->apisUl->rgSCHUpdBsrShort(ue->cellInfo[sCellIdx]->cell,
17649 * @brief Truncated BSR update.
17653 * Function : rgSCHCmnUpdBsrTrunc
17655 * This function does the required updates to handle a truncated BSR report.
17658 * @param[in] RgSchCellCb *cell
17659 * @param[in] RgSchUeCb *ue
17660 * @param[in] RgSchLcgCb *ulLcg
17661 * @param[in] U8 bsr
17662 * @param[out] RgSchErrInfo *err
17668 PUBLIC S16 rgSCHCmnUpdBsrTrunc
17677 PUBLIC S16 rgSCHCmnUpdBsrTrunc(cell, ue, ulLcg, bsr, err)
17685 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17686 RgSchCmnLcg *cmnLcg = NULLP;
17692 TRC2(rgSCHCmnUpdBsrTrunc);
17694 if (!RGSCH_LCG_ISCFGD(ulLcg))
17696 err->errCause = RGSCHERR_SCH_LCG_NOT_CFGD;
17699 /* Set all higher priority LCGs' bs to 0, update this LCG's bs, and set
17700 total bsr = sum of all LCGs' bs */
17703 for (cnt = ulLcg->lcgId-1; cnt >= 0; cnt--)
17706 /* If the existing BO is zero then don't do anything */
17707 if(((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs != 0)
17709 for(idx = 0; idx < ue->ul.lcgArr[cnt].numLch; idx++)
17712 if((ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount) &&
17713 (ue->ulActiveLCs & (1 <<
17714 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1))))
17716 ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount--;
17717 ue->ulActiveLCs &= ~(1 <<
17718 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1));
17723 ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs = 0;
17724 ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->reportedBs = 0;
17729 for (cnt = ulLcg->lcgId; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
17731 if (ulLcg->lcgId == 0)
17735 /* If the existing BO is zero then don't do anything */
17736 if(((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs == 0)
17738 for(idx = 0; idx < ue->ul.lcgArr[cnt].numLch; idx++)
17741 if (!(ue->ulActiveLCs & (1 <<
17742 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1))))
17744 ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount++;
17745 ue->ulActiveLCs |= (1 <<
17746 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1));
17752 ue->ul.nonGbrLcgBs = 0;
17753 ue->ul.nonLcg0Bs = 0;
17754 cmnLcg = ((RgSchCmnLcg *)(ulLcg->sch));
17755 if (TRUE == ue->ul.useExtBSRSizes)
17757 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsr];
17761 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsr];
17763 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17765 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17767 else if(ulLcg->lcgId == 0)
17769 /* This is for handling LCG0 */
17770 cmnLcg->bs = cmnLcg->reportedBs;
17774 ue->ul.nonGbrLcgBs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
17775 cmnLcg->bs = ue->ul.nonGbrLcgBs;
17777 ue->ul.totalBsr = cmnLcg->bs;
17779 for (cnt = ulLcg->lcgId+1; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
17781 /* TODO: The bs for the other LCGs may be stale because some or all of
17782 * the bs may have already been scheduled/data received. Please
17783 * consider this when truncated BSR is tested/implemented */
17784 ue->ul.totalBsr += ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs;
17787 rgSCHCmnUpdUlCompEffBsr(ue);
17790 if(cell->emtcEnable)
17794 cellSch->apisEmtcUl->rgSCHUpdBsrTrunc(cell, ue, ulLcg, bsr);
17801 cellSch->apisUl->rgSCHUpdBsrTrunc(cell, ue, ulLcg, bsr);
17805 if (ue->ul.isUlCaEnabled && ue->numSCells)
17807 for(U8 sCellIdx = 1; sCellIdx <= RG_SCH_MAX_SCELL ; sCellIdx++)
17809 #ifndef PAL_ENABLE_UL_CA
17810 if((ue->cellInfo[sCellIdx] != NULLP) &&
17811 (ue->cellInfo[sCellIdx]->sCellState == RG_SCH_SCELL_ACTIVE))
17813 if(ue->cellInfo[sCellIdx] != NULLP)
17816 cellSch->apisUl->rgSCHUpdBsrTrunc(ue->cellInfo[sCellIdx]->cell, ue, ulLcg, bsr);
17826 * @brief Long BSR update.
17830 * Function : rgSCHCmnUpdBsrLong
17832 * - Update BSRs for all configured LCGs.
17833 * - Update priority of LCGs if needed.
17834 * - Update UE's position within/across uplink scheduling queues.
17837 * @param[in] RgSchCellCb *cell
17838 * @param[in] RgSchUeCb *ue
17839 * @param[in] U8 bsArr[]
17840 * @param[out] RgSchErrInfo *err
17846 PUBLIC S16 rgSCHCmnUpdBsrLong
17854 PUBLIC S16 rgSCHCmnUpdBsrLong(cell, ue, bsArr, err)
17861 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17862 U32 tmpBsArr[4] = {0, 0, 0, 0};
17870 TRC2(rgSCHCmnUpdBsrLong);
17873 for(idx1 = 1; idx1 < RGSCH_MAX_LCG_PER_UE; idx1++)
17875 /* If Old BO is non zero then do nothing */
17876 if ((((RgSchCmnLcg *)(ue->ul.lcgArr[idx1].sch))->bs == 0)
17879 for(idx2 = 0; idx2 < ue->ul.lcgArr[idx1].numLch; idx2++)
17882 if (!(ue->ulActiveLCs & (1 <<
17883 (ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->qci -1))))
17885 ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->ulUeCount++;
17886 ue->ulActiveLCs |= (1 <<
17887 (ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->qci -1));
17893 ue->ul.nonGbrLcgBs = 0;
17894 ue->ul.nonLcg0Bs = 0;
17896 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[0]))
17898 if (TRUE == ue->ul.useExtBSRSizes)
17900 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = rgSchCmnExtBsrTbl[bsArr[0]];
17901 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->reportedBs = rgSchCmnExtBsrTbl[bsArr[0]];
17902 tmpBsArr[0] = rgSchCmnExtBsrTbl[bsArr[0]];
17906 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = rgSchCmnBsrTbl[bsArr[0]];
17907 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->reportedBs = rgSchCmnBsrTbl[bsArr[0]];
17908 tmpBsArr[0] = rgSchCmnBsrTbl[bsArr[0]];
17911 for (lcgId = 1; lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
17913 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
17915 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
17917 if (TRUE == ue->ul.useExtBSRSizes)
17919 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsArr[lcgId]];
17923 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsArr[lcgId]];
17925 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17927 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17928 tmpBsArr[lcgId] = cmnLcg->bs;
17932 nonGbrBs += cmnLcg->reportedBs;
17933 tmpBsArr[lcgId] = cmnLcg->reportedBs;
17934 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs,ue->ul.effAmbr);
17938 ue->ul.nonGbrLcgBs = RGSCH_MIN(nonGbrBs,ue->ul.effAmbr);
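   /* Illustrative example: two non-GBR LCGs reporting 600 and 500 bytes give
    * nonGbrBs = 1100; with effAmbr = 800 the aggregate non-GBR backlog is capped
    * at 800 bytes. */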
17940 ue->ul.totalBsr = tmpBsArr[0] + tmpBsArr[1] + tmpBsArr[2] + tmpBsArr[3];
17942 if ((ue->bsrTmr.tmrEvnt != TMR_NONE) && (ue->ul.totalBsr == 0))
17944 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
17949 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE) /* SPS_FIX */
17951 if(ue->ul.totalBsr - tmpBsArr[1] == 0)
17952 {/* Updating the BSR to SPS only if LCG1 BS is present in sps active state */
17953 rgSCHCmnSpsBsrRpt(cell, ue, &ue->ul.lcgArr[1]);
17957 rgSCHCmnUpdUlCompEffBsr(ue);
17960 if(cell->emtcEnable)
17964 cellSch->apisEmtcUl->rgSCHUpdBsrLong(cell, ue, bsArr);
17971 cellSch->apisUl->rgSCHUpdBsrLong(cell, ue, bsArr);
17975 if (ue->ul.isUlCaEnabled && ue->numSCells)
17977 for(U8 idx = 1; idx <= RG_SCH_MAX_SCELL ; idx++)
17979 #ifndef PAL_ENABLE_UL_CA
17980 if((ue->cellInfo[idx] != NULLP) &&
17981 (ue->cellInfo[idx]->sCellState == RG_SCH_SCELL_ACTIVE))
17983 if(ue->cellInfo[idx] != NULLP)
17986 cellSch->apisUl->rgSCHUpdBsrLong(ue->cellInfo[idx]->cell, ue, bsArr);
17996 * @brief PHR update.
18000 * Function : rgSCHCmnUpdExtPhr
18002 * Updates extended power headroom information for a UE.
18004 * @param[in] RgSchCellCb *cell
18005 * @param[in] RgSchUeCb *ue
18006 * @param[in] U8 phr
18007 * @param[out] RgSchErrInfo *err
18013 PUBLIC S16 rgSCHCmnUpdExtPhr
18017 RgInfExtPhrCEInfo *extPhr,
18021 PUBLIC S16 rgSCHCmnUpdExtPhr(cell, ue, extPhr, err)
18024 RgInfExtPhrCEInfo *extPhr;
18028 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18029 RgSchCmnAllocRecord *allRcd;
18030 CmLList *node = ueUl->ulAllocLst.last;
18033 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
18035 TRC2(rgSCHCmnUpdExtPhr);
18041 allRcd = (RgSchCmnAllocRecord *)node->node;
18043 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18045 rgSCHPwrUpdExtPhr(cell, ue, extPhr, allRcd);
18050 if(ulSpsUe->isUlSpsActv)
18052 rgSCHCmnSpsPhrInd(cell,ue);
18057 } /* rgSCHCmnUpdExtPhr */
18063 * @brief PHR update.
18067 * Function : rgSCHCmnUpdPhr
18069 * Updates power headroom information for a UE.
18071 * @param[in] RgSchCellCb *cell
18072 * @param[in] RgSchUeCb *ue
18073 * @param[in] U8 phr
18074 * @param[out] RgSchErrInfo *err
18080 PUBLIC S16 rgSCHCmnUpdPhr
18088 PUBLIC S16 rgSCHCmnUpdPhr(cell, ue, phr, err)
18095 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18096 RgSchCmnAllocRecord *allRcd;
18097 CmLList *node = ueUl->ulAllocLst.last;
18100 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
18102 TRC2(rgSCHCmnUpdPhr);
18108 allRcd = (RgSchCmnAllocRecord *)node->node;
18110 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18112 rgSCHPwrUpdPhr(cell, ue, phr, allRcd, RG_SCH_CMN_PWR_USE_CFG_MAX_PWR);
18117 if(ulSpsUe->isUlSpsActv)
18119 rgSCHCmnSpsPhrInd(cell,ue);
18124 } /* rgSCHCmnUpdPhr */
18127 * @brief UL grant for contention resolution.
18131 * Function : rgSCHCmnContResUlGrant
18133 * Add UE to another queue specifically for CRNTI based contention resolution.
18137 * @param[in] RgSchUeCb *ue
18138 * @param[out] RgSchErrInfo *err
18144 PUBLIC S16 rgSCHCmnContResUlGrant
18151 PUBLIC S16 rgSCHCmnContResUlGrant(cell, ue, err)
18157 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
18158 TRC2(rgSCHCmnContResUlGrant);
18161 if(cell->emtcEnable)
18165 cellSch->apisEmtcUl->rgSCHContResUlGrant(cell, ue);
18172 cellSch->apisUl->rgSCHContResUlGrant(cell, ue);
18178 * @brief SR reception handling.
18182 * Function : rgSCHCmnSrRcvd
18184 * - Update UE's position within/across uplink scheduling queues
18185 * - Update priority of LCGs if needed.
18187 * @param[in] RgSchCellCb *cell
18188 * @param[in] RgSchUeCb *ue
18189 * @param[in] CmLteTimingInfo frm
18190 * @param[out] RgSchErrInfo *err
18196 PUBLIC S16 rgSCHCmnSrRcvd
18200 CmLteTimingInfo frm,
18204 PUBLIC S16 rgSCHCmnSrRcvd(cell, ue, frm, err)
18207 CmLteTimingInfo frm;
18211 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
18212 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18213 CmLList *node = ueUl->ulAllocLst.last;
18215 TRC2(rgSCHCmnSrRcvd);
18218 emtcStatsUlTomSrInd++;
18221 RGSCH_INCR_SUB_FRAME(frm, 1); /* 1 TTI after the time SR was sent */
18224 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)node->node;
18225 if (RGSCH_TIMEINFO_SAME(frm, allRcd->allocTime))
18231 //TODO_SID Need to check when it is getting triggered
18232 ue->isSrGrant = TRUE;
18234 if(cell->emtcEnable)
18238 cellSch->apisEmtcUl->rgSCHSrRcvd(cell, ue);
18245 cellSch->apisUl->rgSCHSrRcvd(cell, ue);
18251 * @brief Returns first uplink allocation to send reception
18256 * Function: rgSCHCmnFirstRcptnReq(cell)
18257 * Purpose: This function returns the first uplink allocation
18258 * (or NULLP if there is none) in the subframe
18259 * for which the scheduler is expected to prepare and send reception requests.
18264 * @param[in] RgSchCellCb *cell
18265 * @return RgSchUlAlloc*
18268 PUBLIC RgSchUlAlloc *rgSCHCmnFirstRcptnReq
18273 PUBLIC RgSchUlAlloc *rgSCHCmnFirstRcptnReq(cell)
18277 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18279 RgSchUlAlloc* alloc = NULLP;
18281 TRC2(rgSCHCmnFirstRcptnReq);
18283 if (cellUl->rcpReqIdx != RGSCH_INVALID_INFO)
18285 RgSchUlSf* sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18286 alloc = rgSCHUtlUlAllocFirst(sf);
18288 if (alloc && alloc->hqProc == NULLP)
18290 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18298 * @brief Returns first uplink allocation to send reception
18303 * Function: rgSCHCmnNextRcptnReq(cell)
18304 * Purpose: This function returns the next uplink allocation
18305 * (or NULLP if there is none) in the subframe
18306 * for which the scheduler is expected to prepare and send reception requests.
18311 * @param[in] RgSchCellCb *cell
18312 * @return RgSchUlAlloc*
18315 PUBLIC RgSchUlAlloc *rgSCHCmnNextRcptnReq
18318 RgSchUlAlloc *alloc
18321 PUBLIC RgSchUlAlloc *rgSCHCmnNextRcptnReq(cell, alloc)
18323 RgSchUlAlloc *alloc;
18326 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18328 //RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18330 TRC2(rgSCHCmnNextRcptnReq);
18332 if (cellUl->rcpReqIdx != RGSCH_INVALID_INFO)
18334 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18336 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18337 if (alloc && alloc->hqProc == NULLP)
18339 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18350 * @brief Collates DRX enabled UE's scheduled in this SF
18354 * Function: rgSCHCmnDrxStrtInActvTmrInUl(cell)
18355 * Purpose: This function collates the list
18356 * of UEs scheduled in this SF that
18357 * have DRX enabled. It then calls the
18358 * DRX specific function to start/restart
18359 * the inactivity timer in UL
18363 * @param[in] RgSchCellCb *cell
18367 PUBLIC Void rgSCHCmnDrxStrtInActvTmrInUl
18372 PUBLIC Void rgSCHCmnDrxStrtInActvTmrInUl(cell)
18376 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18377 RgSchUlSf *sf = &(cellUl->ulSfArr[cellUl->schdIdx]);
18378 RgSchUlAlloc *alloc = rgSCHUtlUlAllocFirst(sf);
18383 TRC2(rgSCHCmnDrxStrtInActvTmrInUl);
18385 cmLListInit(&ulUeLst);
18393 if (!(alloc->grnt.isRtx) && ueCb->isDrxEnabled && !(ueCb->isSrGrant)
18395 /* ccpu00139513- DRX inactivity timer should not be started for
18396 * UL SPS occasions */
18397 && (alloc->hqProc->isSpsOccnHqP == FALSE)
18401 cmLListAdd2Tail(&ulUeLst,&(ueCb->ulDrxInactvTmrLnk));
18402 ueCb->ulDrxInactvTmrLnk.node = (PTR)ueCb;
18406 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18409 (Void)rgSCHDrxStrtInActvTmr(cell,&ulUeLst,RG_SCH_DRX_UL);
18416 * @brief Returns first uplink allocation to send HARQ feedback
18421 * Function: rgSCHCmnFirstHqFdbkAlloc
18422 * Purpose: This function returns the first uplink allocation
18423 * (or NULLP if there is none) in the subframe
18424 * for which it is expected to prepare and send HARQ
18429 * @param[in] RgSchCellCb *cell
18430 * @param[in] U8 idx
18431 * @return RgSchUlAlloc*
18434 PUBLIC RgSchUlAlloc *rgSCHCmnFirstHqFdbkAlloc
18440 PUBLIC RgSchUlAlloc *rgSCHCmnFirstHqFdbkAlloc(cell, idx)
18445 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18447 RgSchUlAlloc *alloc = NULLP;
18449 TRC2(rgSCHCmnFirstHqFdbkAlloc);
18451 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
18453 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
18454 alloc = rgSCHUtlUlAllocFirst(sf);
18456 while (alloc && (alloc->hqProc == NULLP))
18458 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18466 * @brief Returns next allocation to send HARQ feedback for.
18470 * Function: rgSCHCmnNextHqFdbkAlloc(cell)
18471 * Purpose: This function returns the next uplink allocation
18472 * (or NULLP if there is none) in the subframe
18473 * for which HARQ feedback needs to be sent.
18477 * @param[in] RgSchCellCb *cell
18478 * @return RgSchUlAlloc*
18481 PUBLIC RgSchUlAlloc *rgSCHCmnNextHqFdbkAlloc
18484 RgSchUlAlloc *alloc,
18488 PUBLIC RgSchUlAlloc *rgSCHCmnNextHqFdbkAlloc(cell, alloc, idx)
18490 RgSchUlAlloc *alloc;
18494 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18495 TRC2(rgSCHCmnNextHqFdbkAlloc);
18497 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
18499 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
18501 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18502 while (alloc && (alloc->hqProc == NULLP))
18504 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18514 /***********************************************************
18516 * Func : rgSCHCmnUlGetITbsFrmIMcs
18518 * Desc : Returns the Itbs that is mapped to an Imcs
18519 * for the case of uplink.
18527 **********************************************************/
18529 PUBLIC U8 rgSCHCmnUlGetITbsFrmIMcs
18534 PUBLIC U8 rgSCHCmnUlGetITbsFrmIMcs(iMcs)
18538 TRC2(rgSCHCmnUlGetITbsFrmIMcs);
18540 RETVALUE(rgUlIMcsTbl[iMcs].iTbs);
18543 /***********************************************************
18545 * Func : rgSCHCmnUlGetIMcsFrmITbs
18547 * Desc : Returns the Imcs that is mapped to an Itbs
18548 * for the case of uplink.
18552 * Notes: For iTbs 19, iMcs is dependent on the modulation order.
18553 * Refer to 36.213, Table 8.6.1-1 and 36.306 Table 4.1-2
18554 * for UE capability information
18558 **********************************************************/
18560 PUBLIC U8 rgSCHCmnUlGetIMcsFrmITbs
18563 CmLteUeCategory ueCtg
18566 PUBLIC U8 rgSCHCmnUlGetIMcsFrmITbs(iTbs, ueCtg)
18568 CmLteUeCategory ueCtg;
18572 TRC2(rgSCHCmnUlGetIMcsFrmITbs);
18578 /* A higher layer can force a 64QAM UE to transmit at 16QAM.
18579 * We currently do not support this. Once such support
18580 * is added, ueCtg should be replaced by the current transmit
18581 * modulation configuration. Refer to 36.213 - 8.6.1
18583 else if ( iTbs < 19 )
18587 else if ((iTbs == 19) && (ueCtg != CM_LTE_UE_CAT_5))
18597 /* This is a Temp fix, done for TENBPLUS-3898, ULSCH SDU corruption
18598 was seen when IMCS exceeds 20 on T2k TDD*/
18608 /***********************************************************
18610 * Func : rgSCHCmnUlMinTbBitsForITbs
18612 * Desc : Returns the minimum number of bits that can
18613 * be given as a grant for a specific iTbs.
18621 **********************************************************/
18623 PUBLIC U32 rgSCHCmnUlMinTbBitsForITbs
18625 RgSchCmnUlCell *cellUl,
18629 PUBLIC U32 rgSCHCmnUlMinTbBitsForITbs(cellUl, iTbs)
18630 RgSchCmnUlCell *cellUl;
18634 TRC2(rgSCHCmnUlMinTbBitsForITbs);
18636 RGSCH_ARRAY_BOUND_CHECK(0, rgTbSzTbl[0], iTbs);
18638 RETVALUE(rgTbSzTbl[0][iTbs][cellUl->sbSize-1]);
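   /* Note (assumption based on how rgTbSzTbl is indexed here): rgTbSzTbl[0][iTbs][n-1]
    * holds the single-layer TB size in bits for n PRBs, so this returns the TB size
    * for one subband's worth of PRBs at the given iTbs. */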
18641 /***********************************************************
18643 * Func : rgSCHCmnUlSbAlloc
18645 * Desc : Given a required 'number of subbands' and a hole,
18646 * returns a suitable alloc such that the subband
18647 * allocation size is valid
18651 * Notes: Does not assume either passed numSb or hole size
18652 * to be valid for allocation, and hence arrives at
18653 * an acceptable value.
18656 **********************************************************/
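/* Illustrative note (assumption based on the rgSchCmnMult235Tbl name and its use
 * below): an UL allocation must span a subband count expressible as 2^a * 3^b * 5^c,
 * so a requested count is rounded up ('match') or down ('prvMatch') to the nearest
 * such value, e.g. a request for 7 subbands becomes 8 (up) or 6 (down). */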
18658 PUBLIC RgSchUlAlloc *rgSCHCmnUlSbAlloc
18665 PUBLIC RgSchUlAlloc *rgSCHCmnUlSbAlloc(sf, numSb, hole)
18671 U8 holeSz; /* valid hole size */
18672 RgSchUlAlloc *alloc;
18673 TRC2(rgSCHCmnUlSbAlloc);
18675 if ((holeSz = rgSchCmnMult235Tbl[hole->num].prvMatch) == hole->num)
18677 numSb = rgSchCmnMult235Tbl[numSb].match;
18678 if (numSb >= holeSz)
18680 alloc = rgSCHUtlUlAllocGetCompHole(sf, hole);
18684 alloc = rgSCHUtlUlAllocGetPartHole(sf, numSb, hole);
18689 if (numSb < holeSz)
18691 numSb = rgSchCmnMult235Tbl[numSb].match;
18695 numSb = rgSchCmnMult235Tbl[numSb].prvMatch;
18698 if ( numSb >= holeSz )
18702 alloc = rgSCHUtlUlAllocGetPartHole(sf, numSb, hole);
18708 * @brief To fill the RgSchCmnUeUlAlloc structure of UeCb.
18712 * Function: rgSCHCmnUlUeFillAllocInfo
18713 * Purpose: Specific scheduler to call this API to fill the alloc
18716 * Invoked by: Scheduler
18718 * @param[in] RgSchCellCb *cell
18719 * @param[out] RgSchUeCb *ue
18723 PUBLIC Void rgSCHCmnUlUeFillAllocInfo
18729 PUBLIC Void rgSCHCmnUlUeFillAllocInfo(cell, ue)
18734 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18735 RgSchCmnUeUlAlloc *ulAllocInfo;
18736 RgSchCmnUlUe *ueUl;
18738 TRC2(rgSCHCmnUlUeFillAllocInfo);
18740 ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18741 ulAllocInfo = &ueUl->alloc;
18743 /* Fill alloc structure */
18744 rgSCHCmnUlAllocFillTpc(cell, ue, ulAllocInfo->alloc);
18745 rgSCHCmnUlAllocFillNdmrs(cellUl, ulAllocInfo->alloc);
18746 rgSCHCmnUlAllocLnkHqProc(ue, ulAllocInfo->alloc, ulAllocInfo->alloc->hqProc,
18747 ulAllocInfo->alloc->hqProc->isRetx);
18749 rgSCHCmnUlFillPdcchWithAlloc(ulAllocInfo->alloc->pdcch,
18750 ulAllocInfo->alloc, ue);
18751 /* Recording information about this allocation */
18752 rgSCHCmnUlRecordUeAlloc(cell, ue);
18754 /* Update the UE's outstanding allocation */
18755 if (!ulAllocInfo->alloc->hqProc->isRetx)
18757 rgSCHCmnUlUpdOutStndAlloc(cell, ue, ulAllocInfo->allocdBytes);
18764 * @brief Update the UEs outstanding alloc based on the BSR report's timing.
18769 * Function: rgSCHCmnUpdUlCompEffBsr
18770 * Purpose: Clear off all the allocations from outstanding allocation that
18771 * are later than or equal to the BSR timing information (stored in the UE's datIndTime).
18773 * Invoked by: Scheduler
18775 * @param[in] RgSchUeCb *ue
18779 PRIVATE Void rgSCHCmnUpdUlCompEffBsr
18784 PRIVATE Void rgSCHCmnUpdUlCompEffBsr(ue)
18788 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,ue->cell);
18789 CmLList *node = ueUl->ulAllocLst.last;
18790 RgSchCmnAllocRecord *allRcd;
18791 U32 outStndAlloc=0;
18792 U32 nonLcg0OutStndAllocBs=0;
18795 RgSchCmnLcg *cmnLcg = NULLP;
18796 TRC2(rgSCHCmnUpdUlCompEffBsr);
18800 allRcd = (RgSchCmnAllocRecord *)node->node;
18801 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18810 allRcd = (RgSchCmnAllocRecord *)node->node;
18812 outStndAlloc += allRcd->alloc;
18815 cmnLcg = (RgSchCmnLcg *)(ue->ul.lcgArr[0].sch);
18816 /* Update UEs LCG0's bs according to the total outstanding BSR allocation.*/
18817 if (cmnLcg->bs > outStndAlloc)
18819 cmnLcg->bs -= outStndAlloc;
18820 ue->ul.minReqBytes = cmnLcg->bs;
18825 nonLcg0OutStndAllocBs = outStndAlloc - cmnLcg->bs;
18829 for(lcgId = 1;lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
18831 if(RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
18833 cmnLcg = ((RgSchCmnLcg *) (ue->ul.lcgArr[lcgId].sch));
18834 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
18836 nonLcg0Bsr += cmnLcg->bs;
18840 nonLcg0Bsr += ue->ul.nonGbrLcgBs;
18841 if (nonLcg0OutStndAllocBs > nonLcg0Bsr)
18847 nonLcg0Bsr -= nonLcg0OutStndAllocBs;
18849 ue->ul.nonLcg0Bs = nonLcg0Bsr;
18850 /* Cap effBsr with nonLcg0Bsr and append lcg0 bs.
18851 * nonLcg0Bsr limit applies only to lcg1,2,3 */
18852 /* better be handled in individual scheduler */
18853 ue->ul.effBsr = nonLcg0Bsr +\
18854 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
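   /* Illustrative example: with 100 bytes of LCG0 bs left and nonLcg0Bsr = 400
    * bytes after deducting the outstanding allocations, effBsr = 500 bytes. */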
18859 * @brief Records information about the current allocation.
18863 * Function: rgSCHCmnUlRecordUeAlloc
18864 * Purpose: Records information about the current allocation.
18865 * This includes the allocated bytes, as well
18866 * as some power information.
18868 * Invoked by: Scheduler
18870 * @param[in] RgSchCellCb *cell
18871 * @param[in] RgSchUeCb *ue
18875 PUBLIC Void rgSCHCmnUlRecordUeAlloc
18881 PUBLIC Void rgSCHCmnUlRecordUeAlloc(cell, ue)
18887 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18889 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18890 CmLListCp *lst = &ueUl->ulAllocLst;
18891 CmLList *node = ueUl->ulAllocLst.first;
18892 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)(node->node);
18893 RgSchCmnUeUlAlloc *ulAllocInfo = &ueUl->alloc;
18894 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
18895 TRC2(rgSCHCmnUlRecordUeAlloc);
18897 cmLListDelFrm(lst, &allRcd->lnk);
18899 /* To crntTime, add the minimum time at which the UE will
18900 * actually send the BSR, i.e. DELTA + 4 */
18901 allRcd->allocTime = cell->crntTime;
18902 /*ccpu00116293 - Correcting relation between UL subframe and DL subframe based on RG_UL_DELTA*/
18904 if(ue->isEmtcUe == TRUE)
18906 RGSCH_INCR_SUB_FRAME_EMTC(allRcd->allocTime,
18907 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
18912 RGSCH_INCR_SUB_FRAME(allRcd->allocTime,
18913 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
18916 allRcd->allocTime = cellUl->schdTime;
18918 cmLListAdd2Tail(lst, &allRcd->lnk);
18920 /* Filling in the parameters to be recorded */
18921 allRcd->alloc = ulAllocInfo->allocdBytes;
18922 //allRcd->numRb = ulAllocInfo->alloc->grnt.numRb;
18923 allRcd->numRb = (ulAllocInfo->alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
18924 /*Recording the UL CQI derived from the maxUlCqi */
18925 allRcd->cqi = rgSCHCmnUlGetCqi(cell, ue, ueCtg);
18926 allRcd->tpc = ulAllocInfo->alloc->grnt.tpc;
18928 rgSCHPwrRecordRbAlloc(cell, ue, allRcd->numRb);
18930 cell->measurements.ulBytesCnt += ulAllocInfo->allocdBytes;
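
/* Illustrative sketch (hypothetical helper, not the scheduler's API): the
 * per-UE allocation history above is a fixed-size list. The oldest record at
 * the head is unlinked, overwritten with the new grant, stamped with the
 * delta-adjusted allocation time and re-queued at the tail, so only the most
 * recent RG_SCH_CMN_MAX_ALLOC_TRACK allocations are retained. */
static Void rgSCHCmnRecycleAllocRecSketch(CmLListCp *lst, U32 allocdBytes, CmLteTimingInfo allocTime)
{
   CmLList             *node   = lst->first;                    /* oldest record */
   RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)node->node;

   cmLListDelFrm(lst, node);          /* detach the oldest entry               */
   allRcd->allocTime = allocTime;     /* time at which the BSR can reflect it  */
   allRcd->alloc     = allocdBytes;   /* bytes granted in this allocation      */
   cmLListAdd2Tail(lst, node);        /* re-queue as the most recent record    */
}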
18935 /** PHR handling for MSG3
18936 * @brief Records allocation information of msg3 in the UE.
18940 * Function: rgSCHCmnUlRecMsg3Alloc
18941 * Purpose: Records information about msg3 allocation.
18942 * This includes the allocated bytes, as well
18943 * as some power information.
18945 * Invoked by: Scheduler
18947 * @param[in] RgSchCellCb *cell
18948 * @param[in] RgSchUeCb *ue
18949 * @param[in] RgSchRaCb *raCb
18953 PUBLIC Void rgSCHCmnUlRecMsg3Alloc
18960 PUBLIC Void rgSCHCmnUlRecMsg3Alloc(cell, ue, raCb)
18966 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18967 CmLListCp *lst = &ueUl->ulAllocLst;
18968 CmLList *node = ueUl->ulAllocLst.first;
18969 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)(node->node);
18971 /* Stack Crash problem for TRACE5 changes */
18972 TRC2(rgSCHCmnUlRecMsg3Alloc);
18974 cmLListDelFrm(lst, node);
18975 allRcd->allocTime = raCb->msg3AllocTime;
18976 cmLListAdd2Tail(lst, node);
18978 /* Filling in the parameters to be recorded */
18979 allRcd->alloc = raCb->msg3Grnt.datSz;
18980 allRcd->numRb = raCb->msg3Grnt.numRb;
18981 allRcd->cqi = raCb->ccchCqi;
18982 allRcd->tpc = raCb->msg3Grnt.tpc;
18984 rgSCHPwrRecordRbAlloc(cell, ue, allRcd->numRb);
18989 * @brief Keeps track of the most recent RG_SCH_CMN_MAX_ALLOC_TRACK
18990 * allocations. Adds this allocation to the ueUl's ulAllocLst.
18995 * Function: rgSCHCmnUlUpdOutStndAlloc
18996 * Purpose: Recent Allocation shall be at First Pos'n.
18997 * Remove the last node, update the fields
18998 * with the new allocation and add at front.
19000 * Invoked by: Scheduler
19002 * @param[in] RgSchCellCb *cell
19003 * @param[in] RgSchUeCb *ue
19004 * @param[in] U32 alloc
19008 PUBLIC Void rgSCHCmnUlUpdOutStndAlloc
19015 PUBLIC Void rgSCHCmnUlUpdOutStndAlloc(cell, ue, alloc)
19021 U32 nonLcg0Alloc=0;
19022 TRC2(rgSCHCmnUlUpdOutStndAlloc);
19024 /* Update the UE's LCG0 bs according to the total outstanding BSR allocation.*/
19025 if (((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs > alloc)
19027 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs -= alloc;
19031 nonLcg0Alloc = alloc - ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
19032 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = 0;
19035 if (nonLcg0Alloc >= ue->ul.nonLcg0Bs)
19037 ue->ul.nonLcg0Bs = 0;
19041 ue->ul.nonLcg0Bs -= nonLcg0Alloc;
19043 /* Cap effBsr with effAmbr and append lcg0 bs.
19044 * effAmbr limit applies only to lcg1,2,3 non GBR LCG's*/
19045 /* better be handled in individual scheduler */
19046 ue->ul.effBsr = ue->ul.nonLcg0Bs +\
19047 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
19049 if (ue->ul.effBsr == 0)
19051 if (ue->bsrTmr.tmrEvnt != TMR_NONE)
19053 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
19056 if (FALSE == ue->isSrGrant)
19058 if (ue->ul.bsrTmrCfg.isPrdBsrTmrPres)
19061 rgSCHTmrStartTmr(cell, ue, RG_SCH_TMR_BSR,
19062 ue->ul.bsrTmrCfg.prdBsrTmr);
19068 /* Resetting UEs lower Cap */
19069 ue->ul.minReqBytes = 0;
19076 * @brief Returns the "Itbs" for a given UE.
19080 * Function: rgSCHCmnUlGetITbs
19081 * Purpose: This function returns the "Itbs" for a given UE.
19083 * Invoked by: Scheduler
19085 * @param[in] RgSchUeCb *ue
19089 PUBLIC U8 rgSCHCmnUlGetITbs
19096 PUBLIC U8 rgSCHCmnUlGetITbs(cell, ue, isEcp)
19102 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
19103 /* CQI will be capped to maxUlCqi for 16qam UEs */
19104 CmLteUeCategory ueCtgy = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
19108 U8 maxiTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ueUl->maxUlCqi];
19111 TRC2(rgSCHCmnUlGetITbs);
19113 /* #ifdef RG_SCH_CMN_EXT_CP_SUP For ECP pick index 1 */
19115 if ( (ueCtgy != CM_LTE_UE_CAT_5) &&
19116 (ueUl->validUlCqi > ueUl->maxUlCqi)
19119 cqi = ueUl->maxUlCqi;
19123 cqi = ueUl->validUlCqi;
19127 iTbs = (ueUl->ulLaCb.cqiBasediTbs + ueUl->ulLaCb.deltaiTbs)/100;
19129 RG_SCH_CHK_ITBS_RANGE(iTbs, maxiTbs);
19131 iTbs = RGSCH_MIN(iTbs, ue->cell->thresholds.maxUlItbs);
19134 /* This is a temporary fix for TENBPLUS-3898: ULSCH SDU corruption
19135 was seen when IMCS exceeds 20 on T2k TDD */
19144 if ( (ueCtgy != CM_LTE_UE_CAT_5) && (ueUl->crntUlCqi[0] > ueUl->maxUlCqi ))
19146 cqi = ueUl->maxUlCqi;
19150 cqi = ueUl->crntUlCqi[0];
19153 RETVALUE(rgSchCmnUlCqiToTbsTbl[(U8)isEcp][cqi]);
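
/* Illustrative sketch (hypothetical helper): the iTbs selection above first
 * caps the reported UL CQI at maxUlCqi for non-CAT5 UEs, then maps the CQI to
 * an iTbs through rgSchCmnUlCqiToTbsTbl (index 0 for normal CP, 1 for
 * extended CP) and finally clips it against the cell's configured maximum. */
static U8 rgSCHCmnUlItbsFromCqiSketch(U8 reportedCqi, U8 maxUlCqi, Bool isCat5, Bool isEcp, U8 maxUlItbs)
{
   U8 cqi  = (!isCat5 && (reportedCqi > maxUlCqi)) ? maxUlCqi : reportedCqi;
   U8 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][cqi];   /* CQI -> iTbs lookup   */

   return RGSCH_MIN(iTbs, maxUlItbs);                 /* honour the cell cap  */
}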
19157 * @brief This function adds the UE to DLRbAllocInfo TX lst.
19161 * Function: rgSCHCmnDlRbInfoAddUeTx
19162 * Purpose: This function adds the UE to DLRbAllocInfo TX lst.
19164 * Invoked by: Common Scheduler
19166 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19167 * @param[in] RgSchUeCb *ue
19168 * @param[in] RgSchDlHqProcCb *hqP
19173 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx
19176 RgSchCmnDlRbAllocInfo *allocInfo,
19178 RgSchDlHqProcCb *hqP
19181 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx(cell, allocInfo, ue, hqP)
19183 RgSchCmnDlRbAllocInfo *allocInfo;
19185 RgSchDlHqProcCb *hqP;
19188 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
19190 TRC2(rgSCHCmnDlRbInfoAddUeTx);
19192 if (hqP->reqLnk.node == NULLP)
19194 if (cellSch->dl.isDlFreqSel)
19196 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19197 &allocInfo->dedAlloc.txHqPLst, hqP);
19202 cmLListAdd2Tail(&allocInfo->dedAlloc.txHqPLst, &hqP->reqLnk);
19204 hqP->reqLnk.node = (PTR)hqP;
19211 * @brief This function adds the UE to DLRbAllocInfo RETX lst.
19215 * Function: rgSCHCmnDlRbInfoAddUeRetx
19216 * Purpose: This function adds the UE to DLRbAllocInfo RETX lst.
19218 * Invoked by: Common Scheduler
19220 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19221 * @param[in] RgSchUeCb *ue
19222 * @param[in] RgSchDlHqProcCb *hqP
19227 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx
19230 RgSchCmnDlRbAllocInfo *allocInfo,
19232 RgSchDlHqProcCb *hqP
19235 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx(cell, allocInfo, ue, hqP)
19237 RgSchCmnDlRbAllocInfo *allocInfo;
19239 RgSchDlHqProcCb *hqP;
19242 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ue->cell);
19244 TRC2(rgSCHCmnDlRbInfoAddUeRetx);
19246 if (cellSch->dl.isDlFreqSel)
19248 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19249 &allocInfo->dedAlloc.retxHqPLst, hqP);
19253 /* checking UE's presence in this lst is unnecessary */
19254 cmLListAdd2Tail(&allocInfo->dedAlloc.retxHqPLst, &hqP->reqLnk);
19255 hqP->reqLnk.node = (PTR)hqP;
19261 * @brief This function adds the UE to DLRbAllocInfo TX-RETX lst.
19265 * Function: rgSCHCmnDlRbInfoAddUeRetxTx
19266 * Purpose: This adds the UE to DLRbAllocInfo TX-RETX lst.
19268 * Invoked by: Common Scheduler
19270 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19271 * @param[in] RgSchUeCb *ue
19272 * @param[in] RgSchDlHqProcCb *hqP
19277 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetxTx
19280 RgSchCmnDlRbAllocInfo *allocInfo,
19282 RgSchDlHqProcCb *hqP
19285 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetxTx(allocInfo, ue, hqP)
19287 RgSchCmnDlRbAllocInfo *allocInfo;
19289 RgSchDlHqProcCb *hqP;
19292 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ue->cell);
19294 TRC2(rgSCHCmnDlRbInfoAddUeRetxTx);
19296 if (cellSch->dl.isDlFreqSel)
19298 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19299 &allocInfo->dedAlloc.txRetxHqPLst, hqP);
19303 cmLListAdd2Tail(&allocInfo->dedAlloc.txRetxHqPLst, &hqP->reqLnk);
19304 hqP->reqLnk.node = (PTR)hqP;
19310 * @brief This function adds the UE to DLRbAllocInfo NonSchdRetxLst.
19314 * Function: rgSCHCmnDlAdd2NonSchdRetxLst
19315 * Purpose: During RB estimation for RETX, if allocation fails,
19316 * the HARQ process is appended to NonSchdRetxLst; further
19317 * action is taken as part of finalization in the
19318 * respective schedulers.
19320 * Invoked by: Common Scheduler
19322 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19323 * @param[in] RgSchUeCb *ue
19324 * @param[in] RgSchDlHqProcCb *hqP
19329 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst
19331 RgSchCmnDlRbAllocInfo *allocInfo,
19333 RgSchDlHqProcCb *hqP
19336 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst(allocInfo, ue, hqP)
19337 RgSchCmnDlRbAllocInfo *allocInfo;
19339 RgSchDlHqProcCb *hqP;
19342 CmLList *schdLnkNode;
19344 TRC2(rgSCHCmnDlAdd2NonSchdRetxLst);
19347 if ( (hqP->sch != (RgSchCmnDlHqProc *)NULLP) &&
19348 (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP)))
19354 schdLnkNode = &hqP->schdLstLnk;
19355 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
19356 cmLListAdd2Tail(&allocInfo->dedAlloc.nonSchdRetxHqPLst, schdLnkNode);
19364 * @brief This function adds the UE to DLRbAllocInfo NonSchdTxRetxLst.
19368 * Function: rgSCHCmnDlAdd2NonSchdTxRetxLst
19369 * Purpose: During RB estimation for TXRETX, if allocation fails,
19370 * the HARQ process is appended to NonSchdTxRetxLst; further
19371 * action is taken as part of finalization in the
19372 * respective schedulers.
19374 * Invoked by: Common Scheduler
19376 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19377 * @param[in] RgSchUeCb *ue
19378 * @param[in] RgSchDlHqProcCb *hqP
19384 * @brief This function handles the initialisation of DL HARQ ACK/NACK feedback
19385 * timing information for each DL subframe.
19389 * Function: rgSCHCmnDlANFdbkInit
19390 * Purpose: Each DL subframe stores the sfn and subframe
19391 * information of UL subframe in which it expects
19392 * HARQ ACK/NACK feedback for this subframe. It
19393 * generates the information based on Downlink
19394 * Association Set Index table.
19396 * Invoked by: Scheduler
19398 * @param[in] RgSchCellCb* cell
19403 PRIVATE S16 rgSCHCmnDlANFdbkInit
19408 PRIVATE S16 rgSCHCmnDlANFdbkInit(cell)
19413 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19414 U8 maxDlSubfrms = cell->numDlSubfrms;
19421 RgSchTddSubfrmInfo ulSubfrmInfo;
19424 TRC2(rgSCHCmnDlANFdbkInit);
19426 ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19427 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19429 /* Generate HARQ ACK/NACK feedback information for each DL sf in a radio frame
19430 * Calculate this information based on DL Association set Index table */
19431 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19433 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19434 RG_SCH_TDD_UL_SUBFRAME)
19436 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19440 for(idx=0; idx < rgSchTddDlAscSetIdxKTbl[ulDlCfgIdx][sfNum].\
19441 numFdbkSubfrms; idx++)
19443 calcSfNum = sfNum - rgSchTddDlAscSetIdxKTbl[ulDlCfgIdx][sfNum].\
19447 calcSfnOffset = RGSCH_CEIL(-calcSfNum, RGSCH_NUM_SUB_FRAMES);
19454 calcSfNum = ((RGSCH_NUM_SUB_FRAMES * calcSfnOffset) + calcSfNum)\
19455 % RGSCH_NUM_SUB_FRAMES;
19457 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19461 else if((ulSubfrmInfo.switchPoints == 2) && (calcSfNum <= \
19462 RG_SCH_CMN_SPL_SUBFRM_6))
19464 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19468 dlIdx = calcSfNum - maxUlSubfrms;
19471 cell->subFrms[dlIdx]->dlFdbkInfo.subframe = sfNum;
19472 cell->subFrms[dlIdx]->dlFdbkInfo.sfnOffset = calcSfnOffset;
19473 cell->subFrms[dlIdx]->dlFdbkInfo.m = idx;
19475 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19478 /* DL subframes in the subsequent radio frames are initialized
19479 * with the previous radio frames */
19480 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms; dlIdx < maxDlSubfrms;\
19483 sfNum = dlIdx - rgSchTddNumDlSubfrmTbl[ulDlCfgIdx]\
19484 [RGSCH_NUM_SUB_FRAMES-1];
19485 cell->subFrms[dlIdx]->dlFdbkInfo.subframe = \
19486 cell->subFrms[sfNum]->dlFdbkInfo.subframe;
19487 cell->subFrms[dlIdx]->dlFdbkInfo.sfnOffset = \
19488 cell->subFrms[sfNum]->dlFdbkInfo.sfnOffset;
19489 cell->subFrms[dlIdx]->dlFdbkInfo.m = cell->subFrms[sfNum]->dlFdbkInfo.m;
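
/* Illustrative sketch (hypothetical helper, TDD): for an UL subframe ulSf and
 * one entry k of the DL Association Set Index table, the DL subframe that
 * expects its HARQ feedback in ulSf is (ulSf - k) mod 10, and the number of
 * radio frames to step back is ceil((k - ulSf) / 10) when the subtraction
 * wraps. This mirrors the calcSfNum/calcSfnOffset logic above. */
static Void rgSCHCmnDlFdbkSfSketch(U8 ulSf, U8 k, U8 *dlSf, U8 *sfnOffset)
{
   S16 calcSfNum     = (S16)ulSf - (S16)k;
   U8  calcSfnOffset = 0;

   if (calcSfNum < 0)
   {
      calcSfnOffset = (U8)RGSCH_CEIL(-calcSfNum, RGSCH_NUM_SUB_FRAMES);
   }
   *dlSf      = (U8)(((RGSCH_NUM_SUB_FRAMES * calcSfnOffset) + calcSfNum)
                      % RGSCH_NUM_SUB_FRAMES);
   *sfnOffset = calcSfnOffset;
}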
19495 * @brief This function handles the initialization of uplink association
19496 * set information for each DL subframe.
19501 * Function: rgSCHCmnDlKdashUlAscInit
19502 * Purpose: Each DL sf stores the sfn and sf information of UL sf
19503 * in which it expects HQ ACK/NACK trans. It generates the information
19504 * based on k` in UL association set index table.
19506 * Invoked by: Scheduler
19508 * @param[in] RgSchCellCb* cell
19513 PRIVATE S16 rgSCHCmnDlKdashUlAscInit
19518 PRIVATE S16 rgSCHCmnDlKdashUlAscInit(cell)
19523 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19524 U8 maxDlSubfrms = cell->numDlSubfrms;
19530 RgSchTddSubfrmInfo ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19531 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19532 [RGSCH_NUM_SUB_FRAMES-1];
19535 TRC2(rgSCHCmnDlKdashUlAscInit);
19537 /* Generate ACK/NACK offset information for each DL subframe in a radio frame
19538 * Calculate this information based on K` in UL Association Set table */
19539 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19541 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19542 RG_SCH_TDD_UL_SUBFRAME)
19544 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19548 calcSfNum = (sfNum - rgSchTddUlAscIdxKDashTbl[ulDlCfgIdx-1][sfNum] + \
19549 RGSCH_NUM_SUB_FRAMES) % RGSCH_NUM_SUB_FRAMES;
19550 calcSfnOffset = sfNum - rgSchTddUlAscIdxKDashTbl[ulDlCfgIdx-1][sfNum];
19551 if(calcSfnOffset < 0)
19553 calcSfnOffset = RGSCH_CEIL(-calcSfnOffset, RGSCH_NUM_SUB_FRAMES);
19560 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19564 else if((ulSubfrmInfo.switchPoints == 2) &&
19565 (calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_6))
19567 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19571 dlIdx = calcSfNum - maxUlSubfrms;
19574 cell->subFrms[dlIdx]->ulAscInfo.subframe = sfNum;
19575 cell->subFrms[dlIdx]->ulAscInfo.sfnOffset = calcSfnOffset;
19577 /* set dlIdx for which ulAscInfo is updated */
19578 dlPres = dlPres | (1 << dlIdx);
19579 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19582 /* Set Invalid information for which ulAscInfo is not present */
19584 sfCount < rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19587 /* If dlPres is 0, ulAscInfo is not present in that DL index */
19588 if(! ((dlPres >> sfCount)&0x01))
19590 cell->subFrms[sfCount]->ulAscInfo.sfnOffset =
19591 RGSCH_INVALID_INFO;
19592 cell->subFrms[sfCount]->ulAscInfo.subframe =
19593 RGSCH_INVALID_INFO;
19597 /* DL subframes in the subsequent radio frames are initialized
19598 * with the previous radio frames */
19599 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms; dlIdx < maxDlSubfrms;
19603 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19604 cell->subFrms[dlIdx]->ulAscInfo.subframe =
19605 cell->subFrms[sfNum]->ulAscInfo.subframe;
19606 cell->subFrms[dlIdx]->ulAscInfo.sfnOffset =
19607 cell->subFrms[sfNum]->ulAscInfo.sfnOffset;
19614 * @brief This function initialises the 'Np' value for 'p'
19618 * Function: rgSCHCmnDlNpValInit
19619 * Purpose: To initialise the 'Np' value for each 'p'. It is used
19620 * to find the mapping between nCCE and 'p' and used in
19621 * HARQ ACK/NACK reception.
19623 * Invoked by: Scheduler
19625 * @param[in] RgSchCellCb* cell
19630 PRIVATE S16 rgSCHCmnDlNpValInit
19635 PRIVATE S16 rgSCHCmnDlNpValInit(cell)
19641 TRC2(rgSCHCmnDlNpValInit);
19643 /* Always Np is 0 for p=0 */
19644 cell->rgSchTddNpValTbl[0] = 0;
19646 for(idx=1; idx < RGSCH_TDD_MAX_P_PLUS_ONE_VAL; idx++)
19648 np = cell->bwCfg.dlTotalBw * (idx * RG_SCH_CMN_NUM_SUBCAR - 4);
19649 cell->rgSchTddNpValTbl[idx] = (U8) (np/36);
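
/* Worked example (assuming a 50-RB cell and 12 subcarriers per RB) of the Np
 * computation above, i.e. Np = floor(dlTotalBw * (p * 12 - 4) / 36):
 *    N0 = 0
 *    N1 = floor(50 * (12 - 4) / 36) = floor( 400 / 36) = 11
 *    N2 = floor(50 * (24 - 4) / 36) = floor(1000 / 36) = 27
 *    N3 = floor(50 * (36 - 4) / 36) = floor(1600 / 36) = 44
 * A CCE index nCCE is then mapped to 'p' by finding the first entry of this
 * table that exceeds nCCE (see rgSCHCmnGetPValFrmCCE further below). */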
19656 * @brief This function handles the creation of RACH preamble
19657 * list to queue the preambles and process at the scheduled
19662 * Function: rgSCHCmnDlCreateRachPrmLst
19663 * Purpose: To create RACH preamble list based on RA window size.
19664 * It is used to queue the preambles and process it at the
19667 * Invoked by: Scheduler
19669 * @param[in] RgSchCellCb* cell
19674 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst
19679 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst(cell)
19687 TRC2(rgSCHCmnDlCreateRachPrmLst);
19689 RG_SCH_CMN_CALC_RARSPLST_SIZE(cell, raArrSz);
19691 lstSize = raArrSz * RGSCH_MAX_RA_RNTI_PER_SUBFRM * RGSCH_NUM_SUB_FRAMES;
19693 cell->raInfo.maxRaSize = raArrSz;
19694 ret = rgSCHUtlAllocSBuf(cell->instIdx,
19695 (Data **)(&cell->raInfo.raReqLst), (Size)(lstSize * sizeof(CmLListCp)));
19701 cell->raInfo.lstSize = lstSize;
19708 * @brief This function handles the initialization of RACH Response
19709 * information at each DL subframe.
19713 * Function: rgSCHCmnDlRachInfoInit
19714 * Purpose: Each DL subframe stores the sfn and subframe information of
19715 * possible RACH response allowed for UL subframes. It generates
19716 * the information based on PRACH configuration.
19718 * Invoked by: Scheduler
19720 * @param[in] RgSchCellCb* cell
19725 PRIVATE S16 rgSCHCmnDlRachInfoInit
19730 PRIVATE S16 rgSCHCmnDlRachInfoInit(cell)
19735 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19738 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19739 [RGSCH_NUM_SUB_FRAMES-1];
19741 RgSchTddRachRspLst rachRspLst[3][RGSCH_NUM_SUB_FRAMES];
19749 RgSchTddRachDelInfo *delInfo;
19753 TRC2(rgSCHCmnDlRachInfoInit);
19755 cmMemset((U8 *)rachRspLst, 0, sizeof(rachRspLst));
19757 RG_SCH_CMN_CALC_RARSPLST_SIZE(cell, raArrSz);
19759 /* Include Special subframes */
19760 maxUlSubfrms = maxUlSubfrms + \
19761 rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx].switchPoints;
19762 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19764 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] ==
19765 RG_SCH_TDD_DL_SUBFRAME)
19767 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19771 startWin = (sfNum + RG_SCH_CMN_RARSP_WAIT_PRD + \
19772 ((RgSchCmnCell *)cell->sc.sch)->dl.numRaSubFrms);
19773 endWin = (startWin + cell->rachCfg.raWinSize - 1);
19775 rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][startWin%RGSCH_NUM_SUB_FRAMES];
19776 /* Find the next DL subframe starting from Subframe 0 */
19777 if((startSubfrmIdx % RGSCH_NUM_SUB_FRAMES) == 0)
19779 startWin = RGSCH_CEIL(startWin, RGSCH_NUM_SUB_FRAMES);
19780 startWin = startWin * RGSCH_NUM_SUB_FRAMES;
19784 rgSchTddLowDlSubfrmIdxTbl[ulDlCfgIdx][endWin%RGSCH_NUM_SUB_FRAMES];
19785 endWin = (endWin/RGSCH_NUM_SUB_FRAMES) * RGSCH_NUM_SUB_FRAMES \
19787 if(startWin > endWin)
19791 /* Find all the possible RACH Response transmission
19792 * time within the RA window size */
19793 startSubfrmIdx = startWin%RGSCH_NUM_SUB_FRAMES;
19794 for(sfnIdx = startWin/RGSCH_NUM_SUB_FRAMES;
19795 sfnIdx <= endWin/RGSCH_NUM_SUB_FRAMES; sfnIdx++)
19797 if(sfnIdx == endWin/RGSCH_NUM_SUB_FRAMES)
19799 endSubfrmIdx = endWin%RGSCH_NUM_SUB_FRAMES;
19803 endSubfrmIdx = RGSCH_NUM_SUB_FRAMES-1;
19806 /* Find all the possible RACH Response transmission
19807 * time within radio frame */
19808 for(subfrmIdx = startSubfrmIdx;
19809 subfrmIdx <= endSubfrmIdx; subfrmIdx++)
19811 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][subfrmIdx] ==
19812 RG_SCH_TDD_UL_SUBFRAME)
19816 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][subfrmIdx];
19817 /* Find the next DL subframe starting from Subframe 0 */
19818 if(subfrmIdx == RGSCH_NUM_SUB_FRAMES)
19822 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rachRspLst[sfnIdx], subfrmIdx);
19824 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
19825 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].sfnOffset = sfnIdx;
19826 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].subframe[numSubfrms]
19828 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms++;
19830 startSubfrmIdx = RG_SCH_CMN_SUBFRM_0;
19832 /* Update the subframes to be deleted at this subframe */
19833 /* Get the subframe after the end of RA window size */
19836 sfnOffset = endWin/RGSCH_NUM_SUB_FRAMES;
19839 sfnOffset += raArrSz;
19841 sfnIdx = (endWin/RGSCH_NUM_SUB_FRAMES) % raArrSz;
19843 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx],endSubfrmIdx-1);
19844 if((endSubfrmIdx == RGSCH_NUM_SUB_FRAMES) ||
19845 (rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][endSubfrmIdx] ==
19846 RGSCH_NUM_SUB_FRAMES))
19849 rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][RG_SCH_CMN_SUBFRM_0];
19853 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][endSubfrmIdx];
19856 delInfo = &rachRspLst[sfnIdx][subfrmIdx].delInfo;
19857 delInfo->sfnOffset = sfnOffset;
19858 delInfo->subframe[delInfo->numSubfrms] = sfNum;
19859 delInfo->numSubfrms++;
19861 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19864 ret = rgSCHCmnDlCpyRachInfo(cell, rachRspLst, raArrSz);
19874 * @brief This function handles the initialization of PHICH information
19875 * for each DL subframe based on PHICH table.
19879 * Function: rgSCHCmnDlPhichOffsetInit
19880 * Purpose: Each DL subframe stores the sfn and subframe information of the UL subframe
19881 * for which it transmits PHICH in this subframe. It generates the information
19882 * based on PHICH table.
19884 * Invoked by: Scheduler
19886 * @param[in] RgSchCellCb* cell
19891 PRIVATE S16 rgSCHCmnDlPhichOffsetInit
19896 PRIVATE S16 rgSCHCmnDlPhichOffsetInit(cell)
19901 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19902 U8 maxDlSubfrms = cell->numDlSubfrms;
19909 RgSchTddSubfrmInfo ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19910 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19911 [RGSCH_NUM_SUB_FRAMES-1];
19913 TRC2(rgSCHCmnDlPhichOffsetInit);
19915 /* Generate PHICH offset information for each DL subframe in a radio frame
19916 * Calculate this information based on K in PHICH table */
19917 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19919 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19920 RG_SCH_TDD_UL_SUBFRAME)
19922 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19926 calcSfNum = (rgSchTddKPhichTbl[ulDlCfgIdx][sfNum] + sfNum) % \
19927 RGSCH_NUM_SUB_FRAMES;
19928 calcSfnOffset = (rgSchTddKPhichTbl[ulDlCfgIdx][sfNum] + sfNum) / \
19929 RGSCH_NUM_SUB_FRAMES;
19931 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19935 else if((ulSubfrmInfo.switchPoints == 2) &&
19936 (calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_6))
19938 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19942 dlIdx = calcSfNum - maxUlSubfrms;
19945 cell->subFrms[dlIdx]->phichOffInfo.subframe = sfNum;
19946 cell->subFrms[dlIdx]->phichOffInfo.numSubfrms = 1;
19948 cell->subFrms[dlIdx]->phichOffInfo.sfnOffset = calcSfnOffset;
19950 /* set dlIdx for which phich offset is updated */
19951 dlPres = dlPres | (1 << dlIdx);
19952 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19955 /* Set Invalid information for which phich offset is not present */
19957 sfCount < rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19960 /* If dlPres is 0, phich offset is not present in that DL index */
19961 if(! ((dlPres >> sfCount)&0x01))
19963 cell->subFrms[sfCount]->phichOffInfo.sfnOffset =
19964 RGSCH_INVALID_INFO;
19965 cell->subFrms[sfCount]->phichOffInfo.subframe =
19966 RGSCH_INVALID_INFO;
19967 cell->subFrms[sfCount]->phichOffInfo.numSubfrms = 0;
19971 /* DL subframes in the subsequent radio frames are
19972 * initialized with the previous radio frames */
19973 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms;
19974 dlIdx < maxDlSubfrms; dlIdx++)
19977 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19979 cell->subFrms[dlIdx]->phichOffInfo.subframe =
19980 cell->subFrms[sfNum]->phichOffInfo.subframe;
19982 cell->subFrms[dlIdx]->phichOffInfo.sfnOffset =
19983 cell->subFrms[sfNum]->phichOffInfo.sfnOffset;
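
/* Illustrative sketch (hypothetical helper, TDD): the PHICH offset stored
 * above follows the k-PHICH table, i.e. for an UL subframe ulSf the PHICH is
 * carried in DL subframe (ulSf + kPhich) mod 10, which may fall
 * (ulSf + kPhich) / 10 radio frames later. */
static Void rgSCHCmnPhichSfSketch(U8 ulSf, U8 kPhich, U8 *dlSf, U8 *sfnOffset)
{
   *dlSf      = (U8)((ulSf + kPhich) % RGSCH_NUM_SUB_FRAMES);
   *sfnOffset = (U8)((ulSf + kPhich) / RGSCH_NUM_SUB_FRAMES);
}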
19990 * @brief Update of scheduler variables per TTI.
19994 * Function: rgSCHCmnUpdVars
19995 * Purpose: Update of scheduler variables per TTI.
19997 * @param[in] RgSchCellCb *cell
20002 PUBLIC Void rgSCHCmnUpdVars
20007 PUBLIC Void rgSCHCmnUpdVars(cell)
20011 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
20012 CmLteTimingInfo timeInfo;
20015 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
20018 TRC2(rgSCHCmnUpdVars);
20020 /* ccpu00132654-ADD- Initializing all the indices in every subframe*/
20021 rgSCHCmnInitVars(cell);
20023 idx = (cell->crntTime.slot + TFU_ULCNTRL_DLDELTA) % RGSCH_NUM_SUB_FRAMES;
20024 /* Calculate the UL scheduling subframe idx based on the
20026 if(rgSchTddPuschTxKTbl[ulDlCfgIdx][idx] != 0)
20028 /* PUSCH transmission is based on offset from DL
20029 * PDCCH scheduling */
20030 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo, TFU_ULCNTRL_DLDELTA);
20031 ulSubframe = rgSchTddPuschTxKTbl[ulDlCfgIdx][timeInfo.subframe];
20032 /* Add the DCI-0 to PUSCH time to get the time of UL subframe */
20033 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, ulSubframe);
20035 cellUl->schdTti = timeInfo.sfn * 10 + timeInfo.subframe;
20037 /* Fetch the corresponding UL subframe Idx in UL sf array */
20038 cellUl->schdIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20039 /* Fetch the corresponding UL Harq Proc ID */
20040 cellUl->schdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
20041 cellUl->schdTime = timeInfo;
20043 Mval = rgSchTddPhichMValTbl[ulDlCfgIdx][idx];
20046 /* Fetch the tx time for DL HIDCI-0 */
20047 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo, TFU_ULCNTRL_DLDELTA);
20048 /* Fetch the corresponding n-k tx time of PUSCH */
20049 cellUl->hqFdbkIdx[0] = rgSCHCmnGetPhichUlSfIdx(&timeInfo, cell);
20050 /* Retx will happen according to the Pusch k table */
20051 cellUl->reTxIdx[0] = cellUl->schdIdx;
20053 if(ulDlCfgIdx == 0)
20055 /* Calculate the ReTxIdx corresponding to hqFdbkIdx[0] */
20056 cellUl->reTxIdx[0] = rgSchUtlCfg0ReTxIdx(cell,timeInfo,
20057 cellUl->hqFdbkIdx[0]);
20060 /* At Idx 1 store the UL SF adjacent(left) to the UL SF
20062 cellUl->hqFdbkIdx[1] = (cellUl->hqFdbkIdx[0]-1 +
20063 cellUl->numUlSubfrms) % cellUl->numUlSubfrms;
20064 /* Calculate the ReTxIdx corresponding to hqFdbkIdx[1] */
20065 cellUl->reTxIdx[1] = rgSchUtlCfg0ReTxIdx(cell,timeInfo,
20066 cellUl->hqFdbkIdx[1]);
20071 idx = (cell->crntTime.slot + TFU_RECPREQ_DLDELTA) % RGSCH_NUM_SUB_FRAMES;
20072 if (rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][idx] == RG_SCH_TDD_UL_SUBFRAME)
20074 RGSCHCMNADDTOCRNTTIME(cell->crntTime, timeInfo, TFU_RECPREQ_DLDELTA)
20075 cellUl->rcpReqIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20077 idx = (cell->crntTime.slot+RG_SCH_CMN_DL_DELTA) % RGSCH_NUM_SUB_FRAMES;
20079 /*[ccpu00134666]-MOD-Modify the check to schedule the RAR in
20080 special subframe */
20081 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][idx] != RG_SCH_TDD_UL_SUBFRAME)
20083 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,RG_SCH_CMN_DL_DELTA)
20084 msg3Subfrm = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][timeInfo.subframe];
20085 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, msg3Subfrm);
20086 cellUl->msg3SchdIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20087 cellUl->msg3SchdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
20090 if(!rgSchTddSpsUlRsrvTbl[ulDlCfgIdx][idx])
20092 cellUl->spsUlRsrvIdx = RGSCH_INVALID_INFO;
20096 /* introduce some reuse with above code? */
20098 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,RG_SCH_CMN_DL_DELTA)
20099 //offst = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][timeInfo.subframe];
20100 offst = rgSchTddSpsUlRsrvTbl[ulDlCfgIdx][timeInfo.subframe];
20101 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, offst);
20102 cellUl->spsUlRsrvIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20103 /* The HARQ proc continues to be accessed with the same delta before
20104 * the actual data occurrence, and hence uses the same idx */
20105 cellUl->spsUlRsrvHqProcIdx = cellUl->schdHqProcIdx;
20109 /* RACHO: update cmn sched specific RACH variables,
20110 * mainly the prachMaskIndex */
20111 rgSCHCmnUpdRachParam(cell);
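
/* Illustrative sketch (hypothetical helper): the per-TTI update above first
 * advances the current time by TFU_ULCNTRL_DLDELTA to reach the DL subframe
 * carrying the DCI-0, then adds the k value from rgSchTddPuschTxKTbl to reach
 * the UL subframe in which the granted PUSCH is actually transmitted. Adding
 * a subframe offset to an (sfn, subframe) pair works as below. */
static Void rgSCHCmnAddSfOffsetSketch(U16 sfn, U8 sf, U8 offset, U16 *resSfn, U8 *resSf)
{
   U32 absSf = (U32)sfn * RGSCH_NUM_SUB_FRAMES + sf + offset;

   *resSfn = (U16)((absSf / RGSCH_NUM_SUB_FRAMES) % RGSCH_MAX_SFN);
   *resSf  = (U8)(absSf % RGSCH_NUM_SUB_FRAMES);
}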
20117 * @brief To get 'p' value from nCCE.
20121 * Function: rgSCHCmnGetPValFrmCCE
20122 * Purpose: Gets 'p' value for HARQ ACK/NACK reception from CCE.
20124 * @param[in] RgSchCellCb *cell
20125 * @param[in] U8 cce
20130 PUBLIC U8 rgSCHCmnGetPValFrmCCE
20136 PUBLIC U8 rgSCHCmnGetPValFrmCCE(cell, cce)
20142 TRC2(rgSCHCmnGetPValFrmCCE);
20144 for(i=1; i < RGSCH_TDD_MAX_P_PLUS_ONE_VAL; i++)
20146 if(cce < cell->rgSchTddNpValTbl[i])
20155 /***********************************************************
20157 * Func : rgSCHCmnUlAdapRetx
20159 * Desc : Adaptive retransmission for an allocation.
20167 **********************************************************/
20169 PRIVATE Void rgSCHCmnUlAdapRetx
20171 RgSchUlAlloc *alloc,
20172 RgSchUlHqProcCb *proc
20175 PRIVATE Void rgSCHCmnUlAdapRetx(alloc, proc)
20176 RgSchUlAlloc *alloc;
20177 RgSchUlHqProcCb *proc;
20180 TRC2(rgSCHCmnUlAdapRetx);
20182 rgSCHUhmRetx(proc, alloc);
20184 if (proc->rvIdx != 0)
20186 alloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl[proc->rvIdx];
20191 alloc->grnt.iMcsCrnt = alloc->grnt.iMcs;
20197 * @brief Scheduler invocation per TTI.
20201 * Function: rgSCHCmnHdlUlInactUes
20204 * Invoked by: Common Scheduler
20206 * @param[in] RgSchCellCb *cell
20210 PRIVATE Void rgSCHCmnHdlUlInactUes
20215 PRIVATE Void rgSCHCmnHdlUlInactUes(cell)
20219 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20220 CmLListCp ulInactvLst;
20221 TRC2(rgSCHCmnHdlUlInactUes);
20222 /* Get a List of Inactv UEs for UL*/
20223 cmLListInit(&ulInactvLst);
20225 /* Trigger Spfc Schedulers with Inactive UEs */
20226 rgSCHMeasGapANRepGetUlInactvUe (cell, &ulInactvLst);
20227 /* take care of this in UL retransmission */
20228 cellSch->apisUl->rgSCHUlInactvtUes(cell, &ulInactvLst);
20234 * @brief Scheduler invocation per TTI.
20238 * Function: rgSCHCmnHdlDlInactUes
20241 * Invoked by: Common Scheduler
20243 * @param[in] RgSchCellCb *cell
20247 PRIVATE Void rgSCHCmnHdlDlInactUes
20252 PRIVATE Void rgSCHCmnHdlDlInactUes(cell)
20256 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20257 CmLListCp dlInactvLst;
20258 TRC2(rgSCHCmnHdlDlInactUes);
20259 /* Get a List of Inactv UEs for DL */
20260 cmLListInit(&dlInactvLst);
20262 /* Trigger Spfc Schedulers with Inactive UEs */
20263 rgSCHMeasGapANRepGetDlInactvUe (cell, &dlInactvLst);
20265 cellSch->apisDl->rgSCHDlInactvtUes(cell, &dlInactvLst);
20269 /* RACHO: Rach handover functions start here */
20270 /***********************************************************
20272 * Func : rgSCHCmnUeIdleExdThrsld
20274 * Desc : Returns ROK if the UE has been idle for more than the idle threshold.
20283 **********************************************************/
20285 PRIVATE S16 rgSCHCmnUeIdleExdThrsld
20291 PRIVATE S16 rgSCHCmnUeIdleExdThrsld(cell, ue)
20296 /* Time difference in subframes */
20297 U32 sfDiff = RGSCH_CALC_SF_DIFF(cell->crntTime, ue->ul.ulTransTime);
20299 TRC2(rgSCHCmnUeIdleExdThrsld);
20301 if (sfDiff > (U32)RG_SCH_CMN_UE_IDLE_THRSLD(ue))
20313 * @brief Scheduler processing for Ded Preambles on cell configuration.
20317 * Function : rgSCHCmnCfgRachDedPrm
20319 * This function does requisite initialisation
20320 * for RACH Ded Preambles.
20323 * @param[in] RgSchCellCb *cell
20327 PRIVATE Void rgSCHCmnCfgRachDedPrm
20332 PRIVATE Void rgSCHCmnCfgRachDedPrm(cell)
20336 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20337 U32 gap = RG_SCH_CMN_MIN_PRACH_OPPR_GAP;
20340 TRC2(rgSCHCmnCfgRachDedPrm);
20342 if (cell->macPreambleSet.pres == NOTPRSNT)
20346 cellSch->rachCfg.numDedPrm = cell->macPreambleSet.size;
20347 cellSch->rachCfg.dedPrmStart = cell->macPreambleSet.start;
20348 /* Initialize handover List */
20349 cmLListInit(&cellSch->rachCfg.hoUeLst);
20350 /* Initialize pdcch Order List */
20351 cmLListInit(&cellSch->rachCfg.pdcchOdrLst);
20353 /* Initialize the rapId to UE mapping structure */
20354 for (cnt = 0; cnt<cellSch->rachCfg.numDedPrm; cnt++)
20356 cellSch->rachCfg.rapIdMap[cnt].rapId = cellSch->rachCfg.dedPrmStart + \
20358 cmLListInit(&cellSch->rachCfg.rapIdMap[cnt].assgndUes);
20360 /* Perform Prach Mask Idx, remDedPrm, applFrm initializations */
20361 /* Set remDedPrm as numDedPrm */
20362 cellSch->rachCfg.remDedPrm = cellSch->rachCfg.numDedPrm;
20363 /* Initialize applFrm */
20364 cellSch->rachCfg.prachMskIndx = 0;
20365 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_EVEN)
20367 cellSch->rachCfg.applFrm.sfn = (cell->crntTime.sfn + \
20368 (cell->crntTime.sfn % 2)) % RGSCH_MAX_SFN;
20371 else if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ODD)
20373 if((cell->crntTime.sfn%2) == 0)
20375 cellSch->rachCfg.applFrm.sfn = (cell->crntTime.sfn + 1)\
20382 cellSch->rachCfg.applFrm.sfn = cell->crntTime.sfn;
20384 /* Initialize cellSch->rachCfg.applFrm as >= crntTime.
20385 * This is because of RGSCH_CALC_SF_DIFF logic */
20386 if (cellSch->rachCfg.applFrm.sfn == cell->crntTime.sfn)
20388 while (cellSch->rachCfg.prachMskIndx < cell->rachCfg.raOccasion.size)
20390 if (cell->crntTime.slot <\
20391 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx])
20395 cellSch->rachCfg.prachMskIndx++;
20397 if (cellSch->rachCfg.prachMskIndx == cell->rachCfg.raOccasion.size)
20399 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ANY)
20401 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+1) %\
20406 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+2) %\
20409 cellSch->rachCfg.prachMskIndx = 0;
20411 cellSch->rachCfg.applFrm.slot = \
20412 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20416 cellSch->rachCfg.applFrm.slot = \
20417 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20420 /* Note first param to this macro should always be the latest in time */
20421 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, cell->crntTime);
20422 while (sfDiff <= gap)
20424 rgSCHCmnUpdNxtPrchMskIdx(cell);
20425 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, cell->crntTime);
20432 * @brief Updates the PRACH MASK INDEX.
20436 * Function: rgSCHCmnUpdNxtPrchMskIdx
20437 * Purpose: Ensures the "applFrm" field of Cmn Sched RACH
20438 * CFG is always >= "n"+"DELTA", where "n" is the crntTime
20439 * of the cell. If not, applFrm is updated to the next avl
20440 * PRACH opportunity as per the PRACH Cfg Index configuration.
20443 * Invoked by: Common Scheduler
20445 * @param[in] RgSchCellCb *cell
20449 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx
20454 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx(cell)
20458 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20459 TRC2(rgSCHCmnUpdNxtPrchMskIdx);
20461 /* Determine the next prach mask Index */
20462 if (cellSch->rachCfg.prachMskIndx == cell->rachCfg.raOccasion.size - 1)
20464 /* PRACH within applFrm.sfn are done, go to next AVL sfn */
20465 cellSch->rachCfg.prachMskIndx = 0;
20466 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ANY)
20468 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+1) % \
20471 else/* RGR_SFN_EVEN or RGR_SFN_ODD */
20473 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+2) % \
20476 cellSch->rachCfg.applFrm.slot = cell->rachCfg.raOccasion.\
20479 else /* applFrm.sfn is still valid */
20481 cellSch->rachCfg.prachMskIndx += 1;
20482 if ( cellSch->rachCfg.prachMskIndx < RGR_MAX_SUBFRAME_NUM )
20484 cellSch->rachCfg.applFrm.slot = \
20485 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20492 * @brief Updates the Ded preamble RACH parameters
20497 * Function: rgSCHCmnUpdRachParam
20498 * Purpose: Ensures the "applFrm" field of Cmn Sched RACH
20499 * CFG is always >= "n"+"6"+"DELTA", where "n" is the crntTime
20500 * of the cell. If not, applFrm is updated to the next avl
20501 * PRACH opportunity as per the PRACH Cfg Index configuration,
20502 * accordingly the "remDedPrm" is reset to "numDedPrm" and
20503 * "prachMskIdx" field is updated as per "applFrm".
20506 * Invoked by: Common Scheduler
20508 * @param[in] RgSchCellCb *cell
20512 PRIVATE Void rgSCHCmnUpdRachParam
20517 PRIVATE Void rgSCHCmnUpdRachParam(cell)
20522 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20523 U32 gap = RG_SCH_CMN_MIN_PRACH_OPPR_GAP;
20525 TRC2(rgSCHCmnUpdRachParam);
20527 if (cell->macPreambleSet.pres == NOTPRSNT)
20531 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, \
20535 /* applFrm is still a valid next PRACH opportunity */
20538 rgSCHCmnUpdNxtPrchMskIdx(cell);
20539 /* Reset remDedPrm as numDedPrm */
20540 cellSch->rachCfg.remDedPrm = cellSch->rachCfg.numDedPrm;
20546 * @brief Dedicated Preamble allocation function.
20550 * Function: rgSCHCmnAllocPOParam
20551 * Purpose: Allocate pdcch, rapId and PrachMskIdx.
20552 * Set mapping of UE with the allocated rapId.
20554 * Invoked by: Common Scheduler
20556 * @param[in] RgSchCellCb *cell
20557 * @param[in] RgSchDlSf *dlSf
20558 * @param[in] RgSchUeCb *ue
20559 * @param[out] RgSchPdcch **pdcch
20560 * @param[out] U8 *rapId
20561 * @param[out] U8 *prachMskIdx
20565 PRIVATE S16 rgSCHCmnAllocPOParam
20570 RgSchPdcch **pdcch,
20575 PRIVATE S16 rgSCHCmnAllocPOParam(cell, dlSf, ue, pdcch, rapId, prachMskIdx)
20579 RgSchPdcch **pdcch;
20585 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20586 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20588 TRC2(rgSCHCmnAllocPOParam);
20590 if (cell->macPreambleSet.pres == PRSNT_NODEF)
20592 if (cellSch->rachCfg.remDedPrm == 0)
20596 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
20597 if ((*pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE)) == NULLP)
20601 /* The stored prachMskIdx is the index of PRACH opportunities in
20602 * raOccasions.subframes[].
20603 * Converting the same to the actual PRACHMskIdx to be transmitted. */
20604 *prachMskIdx = cellSch->rachCfg.prachMskIndx + 1;
20605 /* Distribution starts from dedPrmStart till dedPrmStart + numDedPrm */
20606 *rapId = cellSch->rachCfg.dedPrmStart +
20607 cellSch->rachCfg.numDedPrm - cellSch->rachCfg.remDedPrm;
20608 cellSch->rachCfg.remDedPrm--;
20609 /* Map UE with the allocated RapId */
20610 ueDl->rachInfo.asgnOppr = cellSch->rachCfg.applFrm;
20611 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, cellSch->rachCfg.rapIdMap, (*rapId - cellSch->rachCfg.dedPrmStart));
20612 cmLListAdd2Tail(&cellSch->rachCfg.rapIdMap[*rapId - cellSch->rachCfg.dedPrmStart].assgndUes,
20613 &ueDl->rachInfo.rapIdLnk);
20614 ueDl->rachInfo.rapIdLnk.node = (PTR)ue;
20615 ueDl->rachInfo.poRapId = *rapId;
20617 else /* if dedicated preambles not configured */
20619 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
20620 if ((*pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE)) == NULLP)
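
/* Worked example (hypothetical configuration) of the dedicated-preamble
 * distribution above: with dedPrmStart = 52, numDedPrm = 4 and remDedPrm
 * starting at 4, successive PDCCH orders for the same PRACH opportunity
 * receive rapId 52, 53, 54, 55 (rapId = dedPrmStart + numDedPrm - remDedPrm,
 * with remDedPrm decremented each time), after which no dedicated preambles
 * remain and the allocation fails. The PRACH mask index signalled to the UE
 * is the stored opportunity index plus one. */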
20632 * @brief Downlink Scheduling Handler.
20636 * Function: rgSCHCmnGenPdcchOrder
20637 * Purpose: For each UE in PO Q, grab a PDCCH,
20638 * get an available ded RapId and fill PDCCH
20639 * with PO information.
20641 * Invoked by: Common Scheduler
20643 * @param[in] RgSchCellCb *cell
20644 * @param[in] RgSchDlSf *dlSf
20648 PRIVATE Void rgSCHCmnGenPdcchOrder
20654 PRIVATE Void rgSCHCmnGenPdcchOrder(cell, dlSf)
20659 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20660 CmLList *node = cellSch->rachCfg.pdcchOdrLst.first;
20664 RgSchPdcch *pdcch = NULLP;
20666 TRC2(rgSCHCmnGenPdcchOrder);
20670 ue = (RgSchUeCb *)node->node;
20672 /* Skip sending for this subframe if the UE is measuring, or inactive in UL due
20673 * to MeasGap, or inactive due to DRX
20675 if ((ue->measGapCb.isMeasuring == TRUE) ||
20676 (ue->ul.ulInactvMask & RG_MEASGAP_INACTIVE) ||
20677 (ue->isDrxEnabled &&
20678 ue->dl.dlInactvMask & RG_DRX_INACTIVE)
20683 if (rgSCHCmnAllocPOParam(cell, dlSf, ue, &pdcch, &rapId,\
20684 &prachMskIdx) != ROK)
20686 /* No more rapIds left for the next valid available opportunity.
20687 * Unsatisfied UEs here would be given a chance when the
20688 * PRACH mask index changes as per rachUpd every TTI */
20690 /* PDCCH can also be ordered with rapId=0, prachMskIdx=0
20691 * so that the UE triggers a RACH procedure with a non-dedicated preamble.
20692 * But the implementation here does not do this. Instead, the "break"
20693 * here implies that a PDCCH Order is always given with valid rapId!=0,
20694 * prachMskIdx!=0 if dedicated preambles are configured.
20695 * If not configured, then trigger a PO with rapId=0, prachMskIdx=0 */
20698 /* Fill pdcch with pdcch odr information */
20699 rgSCHCmnFillPdcchOdr2Sf(cell, ue, pdcch, rapId, prachMskIdx);
20700 /* Remove this UE from the PDCCH ORDER QUEUE */
20701 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
20702 /* Reset UE's power state */
20703 rgSCHPwrUeReset(cell, ue);
20710 * @brief This function adds the UE to the PdcchOdr Q if not already present.
20714 * Function: rgSCHCmnDlAdd2PdcchOdrQ
20717 * Invoked by: CMN Scheduler
20719 * @param[in] RgSchCellCb* cell
20720 * @param[in] RgSchUeCb* ue
20725 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ
20731 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ(cell, ue)
20736 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20737 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20739 TRC2(rgSCHCmnDlAdd2PdcchOdrQ);
20741 if (ueDl->rachInfo.poLnk.node == NULLP)
20743 cmLListAdd2Tail(&cellSch->rachCfg.pdcchOdrLst, &ueDl->rachInfo.poLnk);
20744 ueDl->rachInfo.poLnk.node = (PTR)ue;
20751 * @brief This function removes the UE from the PdcchOdr Q if present.
20755 * Function: rgSCHCmnDlRmvFrmPdcchOdrQ
20758 * Invoked by: CMN Scheduler
20760 * @param[in] RgSchCellCb* cell
20761 * @param[in] RgSchUeCb* ue
20766 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ
20772 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue)
20777 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20778 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20780 TRC2(rgSCHCmnDlRmvFrmPdcchOdrQ);
20782 cmLListDelFrm(&cellSch->rachCfg.pdcchOdrLst, &ueDl->rachInfo.poLnk);
20783 ueDl->rachInfo.poLnk.node = NULLP;
20788 * @brief Fill pdcch with PDCCH order information.
20792 * Function: rgSCHCmnFillPdcchOdr2Sf
20793 * Purpose: Fill PDCCH with PDCCH order information,
20795 * Invoked by: Common Scheduler
20797 * @param[in] RgSchUeCb *ue
20798 * @param[in] RgSchPdcch *pdcch
20799 * @param[in] U8 rapId
20800 * @param[in] U8 prachMskIdx
20804 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf
20813 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf(ue, pdcch, rapId, prachMskIdx)
20821 RgSchUeACqiCb *acqiCb = RG_SCH_CMN_GET_ACQICB(ue,cell);
20823 TRC2(rgSCHCmnFillPdcchOdr2Sf);
20825 pdcch->rnti = ue->ueId;
20826 pdcch->dci.dciFormat = TFU_DCI_FORMAT_1A;
20827 pdcch->dci.u.format1aInfo.isPdcchOrder = TRUE;
20828 pdcch->dci.u.format1aInfo.t.pdcchOrder.preambleIdx = rapId;
20829 pdcch->dci.u.format1aInfo.t.pdcchOrder.prachMaskIdx = prachMskIdx;
20831 /* Request for APer CQI immediately after PDCCH Order */
20832 /* CR ccpu00144525 */
20834 if(ue->dl.ueDlCqiCfg.aprdCqiCfg.pres)
20836 ue->dl.reqForCqi = RG_SCH_APCQI_SERVING_CC;
20837 acqiCb->aCqiTrigWt = 0;
20846 * @brief UE deletion for scheduler.
20850 * Function : rgSCHCmnDelRachInfo
20852 * This function deletes all scheduler information
20853 * pertaining to a UE.
20855 * @param[in] RgSchCellCb *cell
20856 * @param[in] RgSchUeCb *ue
20860 PRIVATE Void rgSCHCmnDelRachInfo
20866 PRIVATE Void rgSCHCmnDelRachInfo(cell, ue)
20871 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20872 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20875 TRC2(rgSCHCmnDelRachInfo);
20877 if (ueDl->rachInfo.poLnk.node)
20879 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
20881 if (ueDl->rachInfo.hoLnk.node)
20883 cmLListDelFrm(&cellSch->rachCfg.hoUeLst, &ueDl->rachInfo.hoLnk);
20884 ueDl->rachInfo.hoLnk.node = NULLP;
20886 if (ueDl->rachInfo.rapIdLnk.node)
20888 rapIdIdx = ueDl->rachInfo.poRapId - cellSch->rachCfg.dedPrmStart;
20889 cmLListDelFrm(&cellSch->rachCfg.rapIdMap[rapIdIdx].assgndUes,
20890 &ueDl->rachInfo.rapIdLnk);
20891 ueDl->rachInfo.rapIdLnk.node = NULLP;
20897 * @brief This function retrieves the ue which has sent this raReq
20898 * and it allocates grant for UEs undergoing (for which RAR
20899 * is being generated) HandOver/PdcchOrder.
20904 * Function: rgSCHCmnHdlHoPo
20905 * Purpose: This function retrieves the ue which has sent this raReq
20906 * and it allocates grant for UEs undergoing (for which RAR
20907 * is being generated) HandOver/PdcchOrder.
20909 * Invoked by: Common Scheduler
20911 * @param[in] RgSchCellCb *cell
20912 * @param[out] CmLListCp *raRspLst
20913 * @param[in] RgSchRaReqInfo *raReq
20918 PRIVATE Void rgSCHCmnHdlHoPo
20921 CmLListCp *raRspLst,
20922 RgSchRaReqInfo *raReq
20925 PRIVATE Void rgSCHCmnHdlHoPo(cell, raRspLst, raReq)
20927 CmLListCp *raRspLst;
20928 RgSchRaReqInfo *raReq;
20931 RgSchUeCb *ue = raReq->ue;
20932 TRC2(rgSCHCmnHdlHoPo);
20934 if ( ue->isDrxEnabled )
20936 rgSCHDrxDedRa(cell,ue);
20938 rgSCHCmnAllocPoHoGrnt(cell, raRspLst, ue, raReq);
20943 * @brief This function retrieves the UE which has sent this raReq
20944 * for handover case.
20949 * Function: rgSCHCmnGetHoUe
20950 * Purpose: This function retrieves the UE which has sent this raReq
20951 * for handover case.
20953 * Invoked by: Common Scheduler
20955 * @param[in] RgSchCellCb *cell
20956 * @param[in] RgSchRaReqInfo *raReq
20957 * @return RgSchUeCb*
20961 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe
20967 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe(cell, rapId)
20972 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20976 RgSchCmnDlUe *ueDl;
20977 TRC2(rgSCHCmnGetHoUe);
20979 ueLst = &cellSch->rachCfg.hoUeLst;
20980 node = ueLst->first;
20983 ue = (RgSchUeCb *)node->node;
20985 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20986 if (ueDl->rachInfo.hoRapId == rapId)
20995 PRIVATE Void rgSCHCmnDelDedPreamble
21001 PRIVATE Void rgSCHCmnDelDedPreamble(cell, preambleId)
21006 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
21010 RgSchCmnDlUe *ueDl;
21011 TRC2(rgSCHCmnDelDedPreamble);
21013 ueLst = &cellSch->rachCfg.hoUeLst;
21014 node = ueLst->first;
21017 ue = (RgSchUeCb *)node->node;
21019 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
21020 if (ueDl->rachInfo.hoRapId == preambleId)
21022 cmLListDelFrm(ueLst, &ueDl->rachInfo.hoLnk);
21023 ueDl->rachInfo.hoLnk.node = (PTR)NULLP;
21029 * @brief This function retrieves the UE which has sent this raReq
21030 * for PDCCH Order case.
21035 * Function: rgSCHCmnGetPoUe
21036 * Purpose: This function retrieves the UE which has sent this raReq
21037 * for PDCCH Order case.
21039 * Invoked by: Common Scheduler
21041 * @param[in] RgSchCellCb *cell
21042 * @param[in] RgSchRaReqInfo *raReq
21043 * @return RgSchUeCb*
21047 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe
21051 CmLteTimingInfo timingInfo
21054 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe(cell, rapId, timingInfo)
21057 CmLteTimingInfo timingInfo;
21060 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
21064 RgSchCmnDlUe *ueDl;
21066 TRC2(rgSCHCmnGetPoUe);
21068 rapIdIdx = rapId -cellSch->rachCfg.dedPrmStart;
21069 ueLst = &cellSch->rachCfg.rapIdMap[rapIdIdx].assgndUes;
21070 node = ueLst->first;
21073 ue = (RgSchUeCb *)node->node;
21075 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
21076 /* Remove UEs irrespective.
21077 * Old UE associations are removed.*/
21078 cmLListDelFrm(ueLst, &ueDl->rachInfo.rapIdLnk);
21079 ueDl->rachInfo.rapIdLnk.node = (PTR)NULLP;
21080 if (RGSCH_TIMEINFO_SAME(ueDl->rachInfo.asgnOppr, timingInfo))
21091 * @brief This function returns the valid UL cqi for a given UE.
21095 * Function: rgSCHCmnUlGetCqi
21096 * Purpose: This function returns the "valid UL cqi" for a given UE
21097 * based on UE category
21099 * Invoked by: Scheduler
21101 * @param[in] RgSchUeCb *ue
21102 * @param[in] U8 ueCtgy
21106 PUBLIC U8 rgSCHCmnUlGetCqi
21110 CmLteUeCategory ueCtgy
21113 PUBLIC U8 rgSCHCmnUlGetCqi(cell, ue, ueCtgy)
21116 CmLteUeCategory ueCtgy;
21119 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21122 TRC2(rgSCHCmnUlGetCqi);
21124 cqi = ueUl->maxUlCqi;
21126 if (!((ueCtgy != CM_LTE_UE_CAT_5) &&
21127 (ueUl->validUlCqi > ueUl->maxUlCqi)))
21129 cqi = ueUl->validUlCqi;
21132 if (!((ueCtgy != CM_LTE_UE_CAT_5) &&
21133 (ueUl->crntUlCqi[0] > ueUl->maxUlCqi )))
21135 cqi = ueUl->crntUlCqi[0];
21139 }/* End of rgSCHCmnUlGetCqi */
21141 /***********************************************************
21143 * Func : rgSCHCmnUlRbAllocForPoHoUe
21145 * Desc : Do uplink RB allocation for a HO/PO UE.
21149 * Notes: Note that as of now, for retx, maxRb
21150 * is not considered. Alternatives, such
21151 * as dropping retx if it crosses maxRb
21152 * could be considered.
21156 **********************************************************/
21158 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe
21166 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe(cell, sf, ue, maxRb)
21173 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21174 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21175 U8 sbSize = cellUl->sbSize;
21176 U32 maxBits = ue->ul.maxBytesPerUePerTti*8;
21178 RgSchUlAlloc *alloc;
21188 RgSchUlHqProcCb *proc = &ueUl->hqEnt.hqProcCb[cellUl->msg3SchdHqProcIdx];
21189 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
21191 TRC2(rgSCHCmnUlRbAllocForPoHoUe);
21192 if ((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
21196 /*MS_WORKAROUND for HO ccpu00121116*/
21197 cqi = rgSCHCmnUlGetCqi(cell, ue, ueCtg);
21198 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend], cqi);
21199 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend][cqi];
21200 iMcs = rgSCHCmnUlGetIMcsFrmITbs(iTbs,ueCtg);
21201 while(iMcs > RG_SCH_CMN_MAX_MSG3_IMCS)
21204 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend][cqi];
21205 iMcs = rgSCHCmnUlGetIMcsFrmITbs(iTbs, ueCtg);
21207 /* Filling the modorder in the grant structure*/
21208 RG_SCH_UL_MCS_TO_MODODR(iMcs,modOdr);
21209 if (!cell->isCpUlExtend)
21211 eff = rgSchCmnNorUlEff[0][iTbs];
21215 eff = rgSchCmnExtUlEff[0][iTbs];
21218 bits = ueUl->alloc.reqBytes * 8;
21220 #if (ERRCLASS & ERRCLS_DEBUG)
21227 if (bits < rgSCHCmnUlMinTbBitsForITbs(cellUl, iTbs))
21230 nPrb = numSb * sbSize;
21234 if (bits > maxBits)
21237 nPrb = bits * 1024 / eff / RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl);
21242 numSb = nPrb / sbSize;
21246 /*ccpu00128775:MOD-Change to get upper threshold nPrb*/
21247 nPrb = RGSCH_CEIL((RGSCH_CEIL(bits * 1024, eff)),
21248 RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl));
21253 numSb = RGSCH_DIV_ROUND(nPrb, sbSize);
21258 alloc = rgSCHCmnUlSbAlloc(sf, (U8)RGSCH_MIN(numSb, cellUl->maxSbPerUe),\
21260 if (alloc == NULLP)
21262 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
21263 "rgSCHCmnUlRbAllocForPoHoUe(): Could not get UlAlloc");
21266 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
21268 /* Filling the modorder in the grant structure start*/
21269 alloc->grnt.modOdr = (TfuModScheme) modOdr;
21270 alloc->grnt.iMcs = iMcs;
21271 alloc->grnt.iMcsCrnt = iMcsCrnt;
21272 alloc->grnt.hop = 0;
21273 /* Fix for ccpu00123915*/
21274 alloc->forMsg3 = TRUE;
21275 alloc->hqProc = proc;
21276 alloc->hqProc->ulSfIdx = cellUl->msg3SchdIdx;
21278 alloc->rnti = ue->ueId;
21279 /* updating initNumRbs in case of HO */
21281 ue->initNumRbs = alloc->grnt.numRb;
21283 ueUl->alloc.alloc = alloc;
21284 iTbs = rgSCHCmnUlGetITbsFrmIMcs(iMcs);
21285 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[0], iTbs);
21286 alloc->grnt.datSz = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
21287 /* MS_WORKAROUND for HO ccpu00121124*/
21288 /*[Adi temp change] Need to fil modOdr */
21289 RG_SCH_UL_MCS_TO_MODODR(alloc->grnt.iMcsCrnt,alloc->grnt.modOdr);
21290 rgSCHUhmNewTx(proc, ueUl->hqEnt.maxHqRetx, alloc);
21291 /* No grant attr recorded now */
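
/* Illustrative sketch (hypothetical helper): the PRB estimation used above
 * converts the requested payload into PRBs using the per-iTbs spectral
 * efficiency 'eff' (bits per RE, scaled by 1024) and the number of resource
 * elements per RB, then rounds the PRB count up to whole subbands. */
static U8 rgSCHCmnUlNumSbForBitsSketch(U32 bits, U32 eff, U32 rePerRb, U8 sbSize)
{
   U32 nPrb  = RGSCH_CEIL(RGSCH_CEIL(bits * 1024, eff), rePerRb);
   U32 numSb = RGSCH_DIV_ROUND(nPrb, sbSize);

   return (U8)numSb;
}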
21296 * @brief This function allocates grant for UEs undergoing (for which RAR
21297 * is being generated) HandOver/PdcchOrder.
21302 * Function: rgSCHCmnAllocPoHoGrnt
21303 * Purpose: This function allocates grant for UEs undergoing (for which RAR
21304 * is being generated) HandOver/PdcchOrder.
21306 * Invoked by: Common Scheduler
21308 * @param[in] RgSchCellCb *cell
21309 * @param[out] CmLListCp *raRspLst,
21310 * @param[in] RgSchUeCb *ue
21311 * @param[in] RgSchRaReqInfo *raReq
21316 PRIVATE Void rgSCHCmnAllocPoHoGrnt
21319 CmLListCp *raRspLst,
21321 RgSchRaReqInfo *raReq
21324 PRIVATE Void rgSCHCmnAllocPoHoGrnt(cell, raRspLst, ue, raReq)
21326 CmLListCp *raRspLst;
21328 RgSchRaReqInfo *raReq;
21331 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21332 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21334 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->msg3SchdIdx];
21336 TRC2(rgSCHCmnAllocPoHoGrnt);
21338 /* Clearing previous allocs if any*/
21339 rgSCHCmnUlUeDelAllocs(cell, ue);
21340 /* Fix : syed allocs are limited */
21341 if (*sf->allocCountRef >= cellUl->maxAllocPerUlSf)
21345 ueUl->alloc.reqBytes = RG_SCH_MIN_GRNT_HOPO;
21346 if (rgSCHCmnUlRbAllocForPoHoUe(cell, sf, ue, RGSCH_MAX_UL_RB) != ROK)
21351 /* Fill grant information */
21352 grnt = &ueUl->alloc.alloc->grnt;
21357 RLOG_ARG1(L_ERROR,DBG_INSTID,cell->instIdx, "Failed to get"
21358 "the grant for HO/PDCCH Order. CRNTI:%d",ue->ueId);
21361 ue->ul.rarGrnt.rapId = raReq->raReq.rapId;
21362 ue->ul.rarGrnt.hop = grnt->hop;
21363 ue->ul.rarGrnt.rbStart = grnt->rbStart;
21364 ue->ul.rarGrnt.numRb = grnt->numRb;
21365 ue->ul.rarGrnt.tpc = grnt->tpc;
21366 ue->ul.rarGrnt.iMcsCrnt = grnt->iMcsCrnt;
21367 ue->ul.rarGrnt.ta.pres = TRUE;
21368 ue->ul.rarGrnt.ta.val = raReq->raReq.ta;
21369 ue->ul.rarGrnt.datSz = grnt->datSz;
21370 if((sf->numACqiCount < RG_SCH_MAX_ACQI_PER_ULSF) && (RG_SCH_APCQI_NO != ue->dl.reqForCqi))
21374 /* Send a two-bit cqiReq field if more than one cell is configured, else one bit */
21375 for (idx = 1;idx < CM_LTE_MAX_CELLS;idx++)
21377 if (ue->cellInfo[idx] != NULLP)
21379 ue->ul.rarGrnt.cqiReqBit = ue->dl.reqForCqi;
21383 if (idx == CM_LTE_MAX_CELLS)
21386 ue->ul.rarGrnt.cqiReqBit = ue->dl.reqForCqi;
21388 ue->dl.reqForCqi = RG_SCH_APCQI_NO;
21389 sf->numACqiCount++;
21393 ue->ul.rarGrnt.cqiReqBit = 0;
21395 /* Attach the HO/PO allocation to the RAR response contention-free list */
21396 cmLListAdd2Tail(raRspLst, &ue->ul.rarGrnt.raRspLnk);
21397 ue->ul.rarGrnt.raRspLnk.node = (PTR)ue;
21403 * @brief This is a utility function to set the fields in
21404 * a UL HARQ proc which is identified for non-adaptive retx
21408 * Function: rgSCHCmnUlNonadapRetx
21409 * Purpose: Sets the fields in UL Harq proc for non-adaptive retx
21411 * @param[in] RgSchCmnUlCell *cellUl
21412 * @param[out] RgSchUlAlloc *alloc
21413 * @param[in] U8 idx
21419 PRIVATE Void rgSCHCmnUlNonadapRetx
21421 RgSchCmnUlCell *cellUl,
21422 RgSchUlAlloc *alloc,
21426 PRIVATE Void rgSCHCmnUlNonadapRetx(cellUl, alloc, idx)
21427 RgSchCmnUlCell *cellUl;
21428 RgSchUlAlloc *alloc;
21432 TRC2(rgSCHCmnUlNonadapRetx);
21433 rgSCHUhmRetx(alloc->hqProc, alloc);
21435 /* Update alloc to retx */
21436 alloc->hqProc->isRetx = TRUE;
21437 alloc->hqProc->ulSfIdx = cellUl->reTxIdx[idx];
21439 if (alloc->hqProc->rvIdx != 0)
21441 alloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl[alloc->hqProc->rvIdx];
21445 alloc->grnt.iMcsCrnt = alloc->grnt.iMcs;
21447 alloc->grnt.isRtx = TRUE;
21448 alloc->pdcch = NULLP;
21452 * @brief Check if 2 allocs overlap
21456 * Function : rgSCHCmnUlAllocsOvrLap
21458 * - Return TRUE if alloc1 and alloc2 overlap.
21460 * @param[in] RgSchUlAlloc *alloc1
21461 * @param[in] RgSchUlAlloc *alloc2
21465 PRIVATE Bool rgSCHCmnUlAllocsOvrLap
21467 RgSchUlAlloc *alloc1,
21468 RgSchUlAlloc *alloc2
21471 PRIVATE Bool rgSCHCmnUlAllocsOvrLap(alloc1, alloc2)
21472 RgSchUlAlloc *alloc1;
21473 RgSchUlAlloc *alloc2;
21477 TRC2(rgSCHCmnUlAllocsOvrLap);
21479 if (((alloc1->sbStart >= alloc2->sbStart) &&
21480 (alloc1->sbStart <= alloc2->sbStart + alloc2->numSb-1)) ||
21481 ((alloc2->sbStart >= alloc1->sbStart) &&
21482 (alloc2->sbStart <= alloc1->sbStart + alloc1->numSb-1)))
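
/* Illustrative sketch (hypothetical helper): the overlap test above is the
 * usual closed-interval check on subband ranges, equivalent to the simpler
 * predicate below where each allocation spans [start, start + numSb - 1]. */
static Bool rgSCHCmnSbRangesOvrLapSketch(U8 start1, U8 numSb1, U8 start2, U8 numSb2)
{
   /* Two ranges overlap iff neither ends before the other begins */
   return (Bool)((start1 <= (U8)(start2 + numSb2 - 1)) &&
                 (start2 <= (U8)(start1 + numSb1 - 1)));
}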
21489 * @brief Copy allocation Info from src to dst.
21493 * Function : rgSCHCmnUlCpyAllocInfo
21495 * - Copy allocation Info from src to dst.
21497 * @param[in] RgSchUlAlloc *srcAlloc
21498 * @param[in] RgSchUlAlloc *dstAlloc
21502 PRIVATE Void rgSCHCmnUlCpyAllocInfo
21505 RgSchUlAlloc *srcAlloc,
21506 RgSchUlAlloc *dstAlloc
21509 PRIVATE Void rgSCHCmnUlCpyAllocInfo(cell, srcAlloc, dstAlloc)
21511 RgSchUlAlloc *srcAlloc;
21512 RgSchUlAlloc *dstAlloc;
21515 RgSchCmnUlUe *ueUl;
21516 TRC2(rgSCHCmnUlCpyAllocInfo);
21518 dstAlloc->grnt = srcAlloc->grnt;
21519 dstAlloc->hqProc = srcAlloc->hqProc;
21520 /* Fix : syed During UE context release, hqProc->alloc
21521 * was pointing to srcAlloc instead of dstAlloc and
21522 * freeing from incorrect sf->allocDb was
21523 * corrupting the list. */
21524 /* In case of SPS Occasion Allocation is done in advance and
21525 at a later time Hq Proc is linked. Hence HqProc
21526 pointer in alloc shall be NULL */
21528 if (dstAlloc->hqProc)
21531 dstAlloc->hqProc->alloc = dstAlloc;
21533 dstAlloc->ue = srcAlloc->ue;
21534 dstAlloc->rnti = srcAlloc->rnti;
21535 dstAlloc->forMsg3 = srcAlloc->forMsg3;
21536 dstAlloc->raCb = srcAlloc->raCb;
21537 dstAlloc->pdcch = srcAlloc->pdcch;
21538 /* Fix : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
21541 ueUl = RG_SCH_CMN_GET_UL_UE(dstAlloc->ue,cell);
21542 ueUl->alloc.alloc = dstAlloc;
21544 if (dstAlloc->ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
21546 if((dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc != NULLP)
21547 && (dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc == srcAlloc))
21549 dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc = dstAlloc;
21558 * @brief Update TX and RETX subframe's allocation
21563 * Function : rgSCHCmnUlInsAllocFrmNewSf2OldSf
21565 * - Release all preassigned allocations of newSf and merge
21567 * - If alloc of newSf collide with one or more allocs of oldSf
21568 * - mark all such allocs of oldSf for Adaptive Retx.
21569 * - Swap the alloc and hole DB references of oldSf and newSf.
21571 * @param[in] RgSchCellCb *cell
21572 * @param[in] RgSchUlSf *newSf
21573 * @param[in] RgSchUlSf *oldSf
21574 * @param[in] RgSchUlAlloc *srcAlloc
21578 PRIVATE Void rgSCHCmnUlInsAllocFrmNewSf2OldSf
21583 RgSchUlAlloc *srcAlloc
21586 PRIVATE Void rgSCHCmnUlInsAllocFrmNewSf2OldSf(cell, newSf, oldSf, srcAlloc)
21590 RgSchUlAlloc *srcAlloc;
21593 RgSchUlAlloc *alloc, *dstAlloc, *nxtAlloc;
21595 /* MS_WORKAROUND ccpu00120827 */
21596 RgSchCmnCell *schCmnCell = (RgSchCmnCell *)(cell->sc.sch);
21598 TRC2(rgSCHCmnUlInsAllocFrmNewSf2OldSf);
21600 if ((alloc = rgSCHUtlUlAllocFirst(oldSf)) != NULLP)
21604 nxtAlloc = rgSCHUtlUlAllocNxt(oldSf, alloc);
21605 /* If there is an overlap between alloc and srcAlloc
21606 * then alloc is marked for Adaptive retx and it is released
21608 if (rgSCHCmnUlAllocsOvrLap(alloc, srcAlloc) == TRUE)
21610 rgSCHCmnUlUpdAllocRetx(cell, alloc);
21611 rgSCHUtlUlAllocRls(oldSf, alloc);
21613 /* No further allocs spanning the srcAlloc subbands */
21614 if (srcAlloc->sbStart + srcAlloc->numSb - 1 <= alloc->sbStart)
21618 } while ((alloc = nxtAlloc) != NULLP);
21621 /* After freeing all the colliding allocs, request for an allocation
21622  * specifying the start and numSb within txSf. This function should
21623  * always return positively with a non-NULL dstAlloc */
21624 /* MS_WORKAROUND ccpu00120827 */
21625 remAllocs = schCmnCell->ul.maxAllocPerUlSf - *oldSf->allocCountRef;
21628 /* Fix : If oldSf already has max Allocs then release the
21629 * old RETX alloc to make space for new alloc of newSf.
21630 * newSf allocs(i.e new Msg3s) are given higher priority
21631 * over retx allocs. */
21632 if ((alloc = rgSCHUtlUlAllocFirst(oldSf)) != NULLP)
21636 nxtAlloc = rgSCHUtlUlAllocNxt(oldSf, alloc);
21637 if (!alloc->mrgdNewTxAlloc)
21639 /* If alloc is for RETX */
21640 /* TODO: In this case, and also when choosing
21641 * an alloc for ADAP RETX, we need to send an ACK for
21642 * the corresponding alloc on PHICH */
21643 #ifndef EMTC_ENABLE
21644 rgSCHCmnUlFreeAllocation(cell, oldSf, alloc);
21646 rgSCHCmnUlFreeAllocation(cell, oldSf, alloc,FALSE);
21650 }while((alloc = nxtAlloc) != NULLP);
21653 dstAlloc = rgSCHUtlUlGetSpfcAlloc(oldSf, srcAlloc->sbStart, srcAlloc->numSb);
21655 /* This should never happen */
21656 if (dstAlloc == NULLP)
21658 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"CRNTI:%d "
21659 "rgSCHUtlUlGetSpfcAlloc failed in rgSCHCmnUlInsAllocFrmNewSf2OldSf",
21664 /* Copy the srcAlloc's state information in to dstAlloc */
21665 rgSCHCmnUlCpyAllocInfo(cell, srcAlloc, dstAlloc);
21666 /* Set new Tx merged Alloc Flag to TRUE, indicating that this
21667 * alloc shall not be processed for non-adaptive retransmission */
21668 dstAlloc->mrgdNewTxAlloc = TRUE;
21672 * @brief Merge all allocations of newSf to oldSf.
21676 * Function : rgSCHCmnUlMergeSfAllocs
21678 * - Merge all allocations of newSf to oldSf.
21679 * - If newSf's alloc collides with oldSf's alloc
21680 * then oldSf's alloc is marked for adaptive Retx
21681 * and is released from oldSf to create space for
21684 * @param[in] RgSchCellCb *cell
21685 * @param[in] RgSchUlSf *oldSf
21686 * @param[in] RgSchUlSf *newSf
21690 PRIVATE Void rgSCHCmnUlMergeSfAllocs
21697 PRIVATE Void rgSCHCmnUlMergeSfAllocs(cell, oldSf, newSf)
21703 RgSchUlAlloc *alloc, *nxtAlloc;
21704 TRC2(rgSCHCmnUlMergeSfAllocs);
21707 /* Merge each alloc of newSf in to oldSf
21708 * and release it from newSf */
21709 if ((alloc = rgSCHUtlUlAllocFirst(newSf)) != NULLP)
21713 nxtAlloc = rgSCHUtlUlAllocNxt(newSf, alloc);
21714 rgSCHCmnUlInsAllocFrmNewSf2OldSf(cell, newSf, oldSf, alloc);
21715 rgSCHUtlUlAllocRls(newSf, alloc);
21716 } while((alloc = nxtAlloc) != NULLP);
21721 * @brief Swap Hole/Alloc DB context of newSf and oldSf.
21725 * Function : rgSCHCmnUlSwapSfAllocs
21727 * - Swap Hole/Alloc DB context of newSf and oldSf.
21729 * @param[in] RgSchCellCb *cell
21730 * @param[in] RgSchUlSf *oldSf
21731 * @param[in] RgSchUlSf *newSf
21735 PRIVATE Void rgSCHCmnUlSwapSfAllocs
21742 PRIVATE Void rgSCHCmnUlSwapSfAllocs(cell, oldSf, newSf)
21748 RgSchUlAllocDb *tempAllocDb = newSf->allocDb;
21749 RgSchUlHoleDb *tempHoleDb = newSf->holeDb;
21750 U8 tempAvailSbs = newSf->availSubbands;
21752 TRC2(rgSCHCmnUlSwapSfAllocs);
21755 newSf->allocDb = oldSf->allocDb;
21756 newSf->holeDb = oldSf->holeDb;
21757 newSf->availSubbands = oldSf->availSubbands;
21759 oldSf->allocDb = tempAllocDb;
21760 oldSf->holeDb = tempHoleDb;
21761 oldSf->availSubbands = tempAvailSbs;
21763 /* Fix ccpu00120610*/
21764 newSf->allocCountRef = &newSf->allocDb->count;
21765 oldSf->allocCountRef = &oldSf->allocDb->count;
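/* Note on the re-seating above: once the allocDb/holeDb pointers have been
 * exchanged, each subframe's allocCountRef must be re-pointed at the count
 * field of the allocDb it now owns; a stale reference would keep tracking
 * the allocation count of the other subframe's DB. */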
21769 * @brief Perform non-adaptive RETX for non-colliding allocs.
21773 * Function : rgSCHCmnUlPrcNonAdptRetx
21775 * - Perform non-adaptive RETX for non-colliding allocs.
21777 * @param[in] RgSchCellCb *cell
21778 * @param[in] RgSchUlSf *newSf
21779 * @param[in] U8 idx
21783 PRIVATE Void rgSCHCmnUlPrcNonAdptRetx
21790 PRIVATE Void rgSCHCmnUlPrcNonAdptRetx(cell, newSf, idx)
21796 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21797 RgSchUlAlloc *alloc, *nxtAlloc;
21798 TRC2(rgSCHCmnUlPrcNonAdptRetx);
21800 /* perform non-adaptive retx allocation(adjustment) */
21801 if ((alloc = rgSCHUtlUlAllocFirst(newSf)) != NULLP)
21805 nxtAlloc = rgSCHUtlUlAllocNxt(newSf, alloc);
21806 /* A merged new TX alloc, reset the state and skip */
21807 if (alloc->mrgdNewTxAlloc)
21809 alloc->mrgdNewTxAlloc = FALSE;
21814 rgSCHCmnUlNonadapRetx(cellUl, alloc, idx);
21816 } while((alloc = nxtAlloc) != NULLP);
21822 * @brief Update TX and RETX subframe's allocation
21827 * Function : rgSCHCmnUlPrfmSfMerge
21829 * - Release all preassigned allocations of newSf and merge
21831 * - If alloc of newSf collide with one or more allocs of oldSf
21832 * - mark all such allocs of oldSf for Adaptive Retx.
21833 * - Swap the alloc and hole DB references of oldSf and newSf.
21834 * - The allocs which did not collide with pre-assigned msg3
21835 * allocs are marked for non-adaptive RETX.
21837 * @param[in] RgSchCellCb *cell
21838 * @param[in] RgSchUlSf *oldSf
21839 * @param[in] RgSchUlSf *newSf
21840 * @param[in] U8 idx
21844 PRIVATE Void rgSCHCmnUlPrfmSfMerge
21852 PRIVATE Void rgSCHCmnUlPrfmSfMerge(cell, oldSf, newSf, idx)
21859 TRC2(rgSCHCmnUlPrfmSfMerge);
21860 /* Preassigned resources for msg3 in newSf.
21861 * Hence do adaptive retx for all NACKED TXs */
21862 rgSCHCmnUlMergeSfAllocs(cell, oldSf, newSf);
21863 /* swap alloc and hole DBs of oldSf and newSf. */
21864 rgSCHCmnUlSwapSfAllocs(cell, oldSf, newSf);
21865 /* Here newSf has the resultant merged allocs context */
21866 /* Perform non-adaptive RETX for non-colliding allocs */
21867 rgSCHCmnUlPrcNonAdptRetx(cell, newSf, idx);
21873 * @brief Update TX and RETX subframe's allocation
21878 * Function : rgSCHCmnUlRmvCmpltdAllocs
21880 * - Free all Transmission which are ACKED
21881 * OR for which MAX retransmission have
21885 * @param[in] RgSchCellCb *cell,
21886 * @param[in] RgSchUlSf *sf
21890 PRIVATE Void rgSCHCmnUlRmvCmpltdAllocs
21896 PRIVATE Void rgSCHCmnUlRmvCmpltdAllocs(cell, sf)
21901 RgSchUlAlloc *alloc, *nxtAlloc;
21902 TRC2(rgSCHCmnUlRmvCmpltdAllocs);
21904 if ((alloc = rgSCHUtlUlAllocFirst(sf)) == NULLP)
21910 nxtAlloc = rgSCHUtlUlAllocNxt(sf, alloc);
21912 printf("rgSCHCmnUlRmvCmpltdAllocs:time(%d %d) alloc->hqProc->remTx %d hqProcId(%d) \n",cell->crntTime.sfn,cell->crntTime.slot,alloc->hqProc->remTx, alloc->grnt.hqProcId);
21914 alloc->hqProc->rcvdCrcInd = TRUE;
21915 if ((alloc->hqProc->rcvdCrcInd) || (alloc->hqProc->remTx == 0))
21918 /* SR_RACH_STATS : MSG 3 MAX RETX FAIL*/
21919 if ((alloc->forMsg3 == TRUE) && (alloc->hqProc->remTx == 0))
21921 rgNumMsg3FailMaxRetx++;
21923 cell->tenbStats->sch.msg3Fail++;
21927 #ifdef MAC_SCH_STATS
21928 if(alloc->ue != NULLP)
21930 /* access from ulHarqProc*/
21931 RgSchUeCb *ueCb = alloc->ue;
21932 RgSchCmnUe *cmnUe = (RgSchCmnUe*)ueCb->sch;
21933 RgSchCmnUlUe *ulUe = &(cmnUe->ul);
21934 U8 cqi = ulUe->crntUlCqi[0];
21935 U16 numUlRetx = ueCb->ul.hqEnt.maxHqRetx - alloc->hqProc->remTx;
21937 hqRetxStats.ulCqiStat[(cqi - 1)].mcs = alloc->grnt.iMcs;
21942 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_1++;
21945 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_2++;
21948 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_3++;
21951 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_4++;
21954 hqRetxStats.ulCqiStat[(cqi - 1)].totalTx = \
21955 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_1 + \
21956 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_2 * 2) + \
21957 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_3 * 3) + \
21958 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_4 * 4);
21961 #endif /*MAC_SCH_STATS*/
21962 rgSCHCmnUlFreeAllocation(cell, sf, alloc);
21964 /*ccpu00106104 MOD added check for AckNackRep */
21965 /* Added check for ACK/NACK so that adaptive retx considers UE
21966 inactivity due to ACK/NACK repetition */
21967 else if((alloc->ue != NULLP) && (TRUE != alloc->forMsg3))
21969 rgSCHCmnUlUpdAllocRetx(cell, alloc);
21970 rgSCHUtlUlAllocRls(sf, alloc);
21972 } while ((alloc = nxtAlloc) != NULLP);
21978 * @brief Update an uplink subframe.
21982 * Function : rgSCHCmnRlsUlSf
21984 * For each allocation
21985 * - if no more tx needed
21986 * - Release allocation
21988 * - Perform retransmission
21990 * @param[in] RgSchUlSf *sf
21991 * @param[in] U8 idx
21995 PUBLIC Void rgSCHCmnRlsUlSf
22001 PUBLIC Void rgSCHCmnRlsUlSf(cell, idx)
22006 TRC2(rgSCHCmnRlsUlSf);
22008 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22010 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
22012 RgSchUlSf *oldSf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
22014 /* Initialize the reTxLst of UL HqProcs for RETX subframe */
22015 if (rgSCHUtlUlAllocFirst(oldSf) == NULLP)
22019 /* Release all completed TX allocs from sf */
22020 rgSCHCmnUlRmvCmpltdAllocs(cell, oldSf);
22022 oldSf->numACqiCount = 0;
22028 * @brief Handle uplink allocation for retransmission.
22032 * Function : rgSCHCmnUlUpdAllocRetx
22034 * - Perform adaptive retransmission
22036 * @param[in] RgSchUlSf *sf
22037 * @param[in] RgSchUlAlloc *alloc
22041 PRIVATE Void rgSCHCmnUlUpdAllocRetx
22044 RgSchUlAlloc *alloc
22047 PRIVATE Void rgSCHCmnUlUpdAllocRetx(cell, alloc)
22049 RgSchUlAlloc *alloc;
22052 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
22054 TRC2(rgSCHCmnUlUpdAllocRetx);
22056 alloc->hqProc->reTxAlloc.rnti = alloc->rnti;
22057 alloc->hqProc->reTxAlloc.numSb = alloc->numSb;
22058 alloc->hqProc->reTxAlloc.iMcs = alloc->grnt.iMcs;
22060 alloc->hqProc->reTxAlloc.dciFrmt = alloc->grnt.dciFrmt;
22061 alloc->hqProc->reTxAlloc.numLyr = alloc->grnt.numLyr;
22062 alloc->hqProc->reTxAlloc.vrbgStart = alloc->grnt.vrbgStart;
22063 alloc->hqProc->reTxAlloc.numVrbg = alloc->grnt.numVrbg;
22064 alloc->hqProc->reTxAlloc.modOdr = alloc->grnt.modOdr;
22066 //iTbs = rgSCHCmnUlGetITbsFrmIMcs(alloc->grnt.iMcs);
22067 //iTbs = alloc->grnt.iMcs;
22068 //RGSCH_ARRAY_BOUND_CHECK( 0, rgTbSzTbl[0], iTbs);
22069 alloc->hqProc->reTxAlloc.tbSz = alloc->grnt.datSz;
22070 //rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1]/8;
22071 alloc->hqProc->reTxAlloc.ue = alloc->ue;
22072 alloc->hqProc->reTxAlloc.forMsg3 = alloc->forMsg3;
22073 alloc->hqProc->reTxAlloc.raCb = alloc->raCb;
22075 /* Set as retransmission is pending */
22076 alloc->hqProc->isRetx = TRUE;
22077 alloc->hqProc->alloc = NULLP;
22078 alloc->hqProc->ulSfIdx = RGSCH_INVALID_INFO;
22080 printf("Adding Harq Proc Id in the retx list hqProcId %d \n",alloc->grnt.hqProcId);
22082 cmLListAdd2Tail(&cmnUlCell->reTxLst, &alloc->hqProc->reTxLnk);
22083 alloc->hqProc->reTxLnk.node = (PTR)alloc->hqProc;
22088 * @brief Attempts allocation for msg3s for which ADAP retransmissions
22093 * Function : rgSCHCmnUlAdapRetxAlloc
22095 * Attempts allocation for msg3s for which ADAP retransmissions
22098 * @param[in] RgSchCellCb *cell
22099 * @param[in] RgSchUlSf *sf
22100 * @param[in] RgSchUlHqProcCb *proc;
22101 * @param[in] RgSchUlHole *hole;
22105 PRIVATE Bool rgSCHCmnUlAdapRetxAlloc
22109 RgSchUlHqProcCb *proc,
22113 PRIVATE Bool rgSCHCmnUlAdapRetxAlloc(cell, sf, proc, hole)
22116 RgSchUlHqProcCb *proc;
22120 U8 numSb = proc->reTxAlloc.numSb;
22121 U8 iMcs = proc->reTxAlloc.iMcs;
22122 CmLteTimingInfo frm = cell->crntTime;
22123 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22126 RgSchUlAlloc *alloc;
22127 TRC2(rgSCHCmnUlAdapRetxAlloc);
22129 /* Fetch PDCCH for msg3 */
22130 /* ccpu00116293 - Correcting relation between UL subframe and DL subframe based on RG_UL_DELTA*/
22131 /* Introduced timing delta for UL control */
22132 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
22133 dlSf = rgSCHUtlSubFrmGet(cell, frm);
22134 pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
22135 if (pdcch == NULLP)
22140 /* Fetch UL Alloc for msg3 */
22141 if (numSb <= hole->num)
22143 alloc = rgSCHUtlUlAllocGetHole(sf, numSb, hole);
22148 rgSCHUtlPdcchPut(cell, &dlSf->pdcchInfo, pdcch);
22149 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
22150 "UL Alloc fail for msg3 retx for rnti: %d\n",
22151 proc->reTxAlloc.rnti);
22155 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
22156 alloc->grnt.iMcs = iMcs;
22157 alloc->grnt.datSz = proc->reTxAlloc.tbSz;
22160 //RG_SCH_UL_MCS_TO_MODODR(iMcs, alloc->grnt.modOdr);
22162 /* Fill UL Alloc for msg3 */
22163 /* RACHO : setting nDmrs to 0 and UlDelaybit to 0*/
22164 alloc->grnt.nDmrs = 0;
22165 alloc->grnt.hop = 0;
22166 alloc->grnt.delayBit = 0;
22167 alloc->grnt.isRtx = TRUE;
22168 proc->ulSfIdx = cellUl->schdIdx;
22170 proc->schdTime = cellUl->schdTime;
22171 alloc->grnt.hqProcId = proc->procId;
22172 alloc->grnt.dciFrmt = proc->reTxAlloc.dciFrmt;
22173 alloc->grnt.numLyr = proc->reTxAlloc.numLyr;
22174 alloc->grnt.vrbgStart = proc->reTxAlloc.vrbgStart;
22175 alloc->grnt.numVrbg = proc->reTxAlloc.numVrbg;
22176 alloc->grnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG, alloc->grnt.vrbgStart, alloc->grnt.numVrbg);
22177 alloc->grnt.modOdr = proc->reTxAlloc.modOdr;
22179 /* TODO : Hardcoding these as of now */
22180 alloc->grnt.hop = 0;
22181 alloc->grnt.SCID = 0;
22182 alloc->grnt.xPUSCHRange = MAX_5GTF_XPUSCH_RANGE;
22183 alloc->grnt.PMI = 0;
22184 alloc->grnt.uciOnxPUSCH = 0;
22186 alloc->rnti = proc->reTxAlloc.rnti;
22187 /* Fix : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
22188 alloc->ue = proc->reTxAlloc.ue;
22189 alloc->pdcch = pdcch;
22190 alloc->forMsg3 = proc->reTxAlloc.forMsg3;
22191 alloc->raCb = proc->reTxAlloc.raCb;
22192 alloc->hqProc = proc;
22193 alloc->isAdaptive = TRUE;
22195 sf->totPrb += alloc->grnt.numRb;
22197 /* FIX : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
22200 alloc->raCb->msg3Grnt= alloc->grnt;
22202 /* To the crntTime, add the time at which UE will
22203 * actually send MSG3 */
22204 alloc->raCb->msg3AllocTime = cell->crntTime;
22205 RGSCH_INCR_SUB_FRAME(alloc->raCb->msg3AllocTime, RG_SCH_CMN_MIN_RETXMSG3_RECP_INTRVL);
22207 alloc->raCb->msg3AllocTime = cellUl->schdTime;
22209 rgSCHCmnUlAdapRetx(alloc, proc);
22210 /* Fill PDCCH with alloc info */
22211 pdcch->rnti = alloc->rnti;
22212 pdcch->dci.dciFormat = TFU_DCI_FORMAT_0;
22213 pdcch->dci.u.format0Info.hoppingEnbld = alloc->grnt.hop;
22214 pdcch->dci.u.format0Info.rbStart = alloc->grnt.rbStart;
22215 pdcch->dci.u.format0Info.numRb = alloc->grnt.numRb;
22216 pdcch->dci.u.format0Info.mcs = alloc->grnt.iMcsCrnt;
22217 pdcch->dci.u.format0Info.ndi = alloc->hqProc->ndi;
22218 pdcch->dci.u.format0Info.nDmrs = alloc->grnt.nDmrs;
22219 pdcch->dci.u.format0Info.tpcCmd = alloc->grnt.tpc;
22223 /* ulIdx setting for cfg 0 shall be appropriately fixed thru ccpu00109015 */
22224 pdcch->dci.u.format0Info.ulIdx = RG_SCH_ULIDX_MSB;
22225 pdcch->dci.u.format0Info.dai = RG_SCH_MAX_DAI_IDX;
22228 pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_0];
22232 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(alloc->ue,cell);
22234 alloc->ue->initNumRbs = (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
22237 ue->ul.nPrb = alloc->grnt.numRb;
22239 ueUl->alloc.alloc = alloc;
22240 /* FIx: Removed the call to rgSCHCmnUlAdapRetx */
22241 rgSCHCmnUlUeFillAllocInfo(cell, alloc->ue);
22242 /* Setting csireq as false for Adaptive Retx*/
22243 ueUl->alloc.alloc->pdcch->dci.u.format0Info.cqiReq = RG_SCH_APCQI_NO;
22244 pdcch->dciNumOfBits = alloc->ue->dciSize.cmnSize[TFU_DCI_FORMAT_0];
22246 /* Reset as retransmission is done */
22247 proc->isRetx = FALSE;
22249 else /* Intg fix */
22251 rgSCHUtlPdcchPut(cell, &dlSf->pdcchInfo, pdcch);
22252 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
22253 "Num SB not suffiecient for adap retx for rnti: %d",
22254 proc->reTxAlloc.rnti);
22260 /* Fix: syed Adaptive Msg3 Retx crash. */
22262 * @brief Releases all Adaptive Retx HqProcs which failed for
22263 * allocations in this scheduling occasion.
22267 * Function : rgSCHCmnUlSfRlsRetxProcs
22270 * @param[in] RgSchCellCb *cell
22271 * @param[in] RgSchUlSf *sf
22276 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs
22282 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs(cell, sf)
22289 RgSchUlHqProcCb *proc;
22290 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22292 TRC2(rgSCHCmnUlSfRlsRetxProcs);
22294 cp = &(cellUl->reTxLst);
22298 proc = (RgSchUlHqProcCb *)node->node;
22300 /* ccpu00137834 : Deleting reTxLnk from the respective reTxLst */
22301 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
22302 proc->reTxLnk.node = (PTR)NULLP;
22309 * @brief Attempts allocation for UEs for which retransmissions
22314 * Function : rgSCHCmnUlSfReTxAllocs
22316 * Attempts allocation for UEs for which retransmissions
22319 * @param[in] RgSchCellCb *cell
22320 * @param[in] RgSchUlSf *sf
22324 PRIVATE Void rgSCHCmnUlSfReTxAllocs
22330 PRIVATE Void rgSCHCmnUlSfReTxAllocs(cell, sf)
22337 RgSchUlHqProcCb *proc;
22340 RgSchCmnCell *schCmnCell = (RgSchCmnCell *)(cell->sc.sch);
22341 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22342 TRC2(rgSCHCmnUlSfReTxAllocs);
22344 cp = &(cellUl->reTxLst);
22348 proc = (RgSchUlHqProcCb *)node->node;
22349 ue = proc->reTxAlloc.ue;
22351 /*ccpu00106104 MOD added check for AckNackRep */
22352 /* Added check for ACK/NACK so that adaptive retx considers UE
22353 inactivity due to ACK/NACK repetition */
22354 if((ue != NULLP) &&
22355 ((ue->measGapCb.isMeasuring == TRUE)||
22356 (ue->ackNakRepCb.isAckNakRep == TRUE)))
22360 /* Fix for ccpu00123917: Check if maximum allocs per UL sf have been exhausted */
22361 if (((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
22362 || (sf->allocDb->count == schCmnCell->ul.maxAllocPerUlSf))
22364 /* No more UL BW then return */
22367 /* perform adaptive retx for UE's */
22368 if (rgSCHCmnUlAdapRetxAlloc(cell, sf, proc, hole) == FALSE)
22372 /* ccpu00137834 : Deleting reTxLnk from the respective reTxLst */
22373 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
22374 /* Fix: syed Adaptive Msg3 Retx crash. */
22375 proc->reTxLnk.node = (PTR)NULLP;
22381 * @brief Handles RB allocation for downlink.
22385 * Function : rgSCHCmnDlRbAlloc
22387 * Invoking Module Processing:
22388 * - This function is invoked for DL RB allocation
22390 * Processing Steps:
22391 * - If cell is frequency selecive,
22392 * - Call rgSCHDlfsAllocRb().
22394 * - Call rgSCHCmnNonDlfsRbAlloc().
22396 * @param[in] RgSchCellCb *cell
22397 * @param[in] RgSchDlRbAllocInfo *allocInfo
22402 PRIVATE Void rgSCHCmnDlRbAlloc
22405 RgSchCmnDlRbAllocInfo *allocInfo
22408 PRIVATE Void rgSCHCmnDlRbAlloc(cell, allocInfo)
22410 RgSchCmnDlRbAllocInfo *allocInfo;
22413 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
22414 TRC2(rgSCHCmnDlRbAlloc);
22416 if (cellSch->dl.isDlFreqSel)
22418 printf("5GTF_ERROR DLFS SCH Enabled\n");
22419 cellSch->apisDlfs->rgSCHDlfsAllocRb(cell, allocInfo);
22423 rgSCHCmnNonDlfsRbAlloc(cell, allocInfo);
22431 * @brief Determines number of RBGs and RBG subset sizes for the given DL
22432 * bandwidth and rbgSize
22435 * Function : rgSCHCmnDlGetRbgInfo
22438 * Processing Steps:
22439 * - Fill-up rbgInfo data structure for given DL bandwidth and rbgSize
22441 * @param[in] U8 dlTotalBw
22442 * @param[in] U8 dlSubsetBw
22443 * @param[in] U8 maxRaType1SubsetBw
22444 * @param[in] U8 rbgSize
22445 * @param[out] RgSchBwRbgInfo *rbgInfo
22449 PUBLIC Void rgSCHCmnDlGetRbgInfo
22453 U8 maxRaType1SubsetBw,
22455 RgSchBwRbgInfo *rbgInfo
22458 PUBLIC Void rgSCHCmnDlGetRbgInfo(dlTotalBw, dlSubsetBw, maxRaType1SubsetBw,
22462 U8 maxRaType1SubsetBw;
22464 RgSchBwRbgInfo *rbgInfo;
22467 #ifdef RGSCH_SPS_UNUSED
22469 U8 lastRbgIdx = ((dlTotalBw + rbgSize - 1)/rbgSize) - 1;
22470 U8 currRbgSize = rbgSize;
22471 U8 subsetSizeIdx = 0;
22472 U8 subsetSize[RG_SCH_NUM_RATYPE1_SUBSETS] = {0};
22473 U8 lastRbgSize = rbgSize - (dlTotalBw - ((dlTotalBw/rbgSize) * rbgSize));
22474 U8 numRaType1Rbgs = (maxRaType1SubsetBw + rbgSize - 1)/rbgSize;
22477 /* Compute maximum number of SPS RBGs for the cell */
22478 rbgInfo->numRbgs = ((dlSubsetBw + rbgSize - 1)/rbgSize);
22480 #ifdef RGSCH_SPS_UNUSED
22481 /* Distribute RBGs across subsets except last RBG */
22482 for (;idx < numRaType1Rbgs - 1; ++idx)
22484 subsetSize[subsetSizeIdx] += currRbgSize;
22485 subsetSizeIdx = (subsetSizeIdx + 1) % rbgSize;
22488 /* Computation for last RBG */
22489 if (idx == lastRbgIdx)
22491 currRbgSize = lastRbgSize;
22493 subsetSize[subsetSizeIdx] += currRbgSize;
22494 subsetSizeIdx = (subsetSizeIdx + 1) % rbgSize;
22497 /* Update the computed sizes */
22498 #ifdef RGSCH_SPS_UNUSED
22499 rbgInfo->lastRbgSize = currRbgSize;
22501 rbgInfo->lastRbgSize = rbgSize -
22502 (dlSubsetBw - ((dlSubsetBw/rbgSize) * rbgSize));
22503 #ifdef RGSCH_SPS_UNUSED
22504 cmMemcpy((U8 *)rbgInfo->rbgSubsetSize, (U8 *) subsetSize, 4 * sizeof(U8));
22506 rbgInfo->numRbs = (rbgInfo->numRbgs * rbgSize > dlTotalBw) ?
22507 dlTotalBw:(rbgInfo->numRbgs * rbgSize);
22508 rbgInfo->rbgSize = rbgSize;
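/* Worked example (assumed values) for the computation above: with
 * dlTotalBw = 100, dlSubsetBw = 22 and rbgSize = 4,
 *   numRbgs     = (22 + 4 - 1) / 4          = 6
 *   lastRbgSize = 4 - (22 - (22 / 4) * 4)   = 2
 *   numRbs      = min(6 * 4, 100)           = 24
 * i.e. six RBGs are needed for the 22-RB subset and the last RBG
 * holds only 2 RBs. */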
22512 * @brief Handles RB allocation for Resource allocation type 0
22516 * Function : rgSCHCmnDlRaType0Alloc
22518 * Invoking Module Processing:
22519 * - This function is invoked for DL RB allocation for resource allocation
22522 * Processing Steps:
22523 * - Determine the available positions in the rbgMask.
22524 * - Allocate RBGs in the available positions.
22525 * - Update RA Type 0, RA Type 1 and RA type 2 masks.
22527 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22528 * @param[in] U8 rbsReq
22529 * @param[in] RgSchBwRbgInfo *rbgInfo
22530 * @param[out] U8 *numAllocRbs
22531 * @param[out] RgSchDlSfAllocInfo *resAllocInfo
22532 * @param[in] Bool isPartialAlloc
22538 PUBLIC U8 rgSCHCmnDlRaType0Alloc
22540 RgSchDlSfAllocInfo *allocedInfo,
22542 RgSchBwRbgInfo *rbgInfo,
22544 RgSchDlSfAllocInfo *resAllocInfo,
22545 Bool isPartialAlloc
22548 PUBLIC U8 rgSCHCmnDlRaType0Alloc(allocedInfo, rbsReq, rbgInfo,
22549 numAllocRbs, resAllocInfo, isPartialAlloc)
22550 RgSchDlSfAllocInfo *allocedInfo;
22552 RgSchBwRbgInfo *rbgInfo;
22554 RgSchDlSfAllocInfo *resAllocInfo;
22555 Bool isPartialAlloc;
22558 /* Note: This function attempts only full allocation */
22559 U32 remNumRbs, rbgPosInRbgMask, ueRaType2Mask;
22560 U8 type2MaskIdx, cnt, rbIdx;
22562 U8 bestNumAvailRbs = 0;
22564 U8 numAllocRbgs = 0;
22565 U8 rbgSize = rbgInfo->rbgSize;
22566 U32 *rbgMask = &(resAllocInfo->raType0Mask);
22567 #ifdef RGSCH_SPS_UNUSED
22570 U32 *raType1Mask = resAllocInfo->raType1Mask;
22571 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22573 U32 *raType2Mask = resAllocInfo->raType2Mask;
22575 U32 allocedMask = allocedInfo->raType0Mask;
22577 maskSize = rbgInfo->numRbgs;
22580 RG_SCH_CMN_DL_COUNT_ONES(allocedMask, maskSize, &usedRbs);
22581 if (maskSize == usedRbs)
22583 /* All RBGs are allocated, including the last one */
22588 remNumRbs = (maskSize - usedRbs - 1) * rbgSize; /* vamsee: removed minus 1 */
22590 /* If last RBG is available, add last RBG size */
22591 if (!(allocedMask & (1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(maskSize - 1))))
22593 remNumRbs += rbgInfo->lastRbgSize;
22597 /* If complete allocation is needed, check if total requested RBs are available else
22598 * check the best available RBs */
22599 if (!isPartialAlloc)
22601 if (remNumRbs >= rbsReq)
22603 bestNumAvailRbs = rbsReq;
22608 bestNumAvailRbs = remNumRbs > rbsReq ? rbsReq : remNumRbs;
22611 /* Allocate for bestNumAvailRbs */
22612 if (bestNumAvailRbs)
22614 for (rbg = 0; rbg < maskSize - 1; ++rbg)
22616 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22617 if (!(allocedMask & rbgPosInRbgMask))
22619 /* Update RBG mask */
22620 *rbgMask |= rbgPosInRbgMask;
22622 /* Compute RB index of the first RB of the RBG allocated */
22623 rbIdx = rbg * rbgSize;
22625 for (cnt = 0; cnt < rbgSize; ++cnt)
22627 #ifdef RGSCH_SPS_UNUSED
22628 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22630 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22631 #ifdef RGSCH_SPS_UNUSED
22632 /* Update RBG mask for RA type 1 */
22633 raType1Mask[rbgSubset] |= ueRaType1Mask;
22634 raType1UsedRbs[rbgSubset]++;
22636 /* Update RA type 2 mask */
22637 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22640 *numAllocRbs += rbgSize;
22641 remNumRbs -= rbgSize;
22643 if (*numAllocRbs >= bestNumAvailRbs)
22649 /* If last RBG available and allocation is not completed, allocate
22651 if (*numAllocRbs < bestNumAvailRbs)
22653 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22654 *rbgMask |= rbgPosInRbgMask;
22655 *numAllocRbs += rbgInfo->lastRbgSize;
22657 /* Compute RB index of the first RB of the last RBG */
22658 rbIdx = ((rbgInfo->numRbgs - 1 ) * rbgSize ); /* removed minus 1 vamsee */
22660 for (cnt = 0; cnt < rbgInfo->lastRbgSize; ++cnt)
22662 #ifdef RGSCH_SPS_UNUSED
22663 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22665 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22666 #ifdef RGSCH_SPS_UNUSED
22667 /* Update RBG mask for RA type 1 */
22668 raType1Mask[rbgSubset] |= ueRaType1Mask;
22669 raType1UsedRbs[rbgSubset]++;
22671 /* Update RA type 2 mask */
22672 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22675 remNumRbs -= rbgInfo->lastRbgSize;
22678 /* Note: this should complete allocation, not checking for the
22682 RETVALUE(numAllocRbgs);
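/* Worked example (assumed values) of the availability check above: with
 * maskSize = 25 RBGs, rbgSize = 4, lastRbgSize = 4, usedRbs = 10 RBGs
 * already allocated and the last RBG still free,
 *   remNumRbs = (25 - 10 - 1) * 4 + 4 = 60.
 * A full (non-partial) request of rbsReq = 20 is then satisfied by setting
 * bestNumAvailRbs = 20 and allocating the first five free RBGs. */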
22685 #ifdef RGSCH_SPS_UNUSED
22687 * @brief Handles RB allocation for Resource allocation type 1
22691 * Function : rgSCHCmnDlRaType1Alloc
22693 * Invoking Module Processing:
22694 * - This function is invoked for DL RB allocation for resource allocation
22697 * Processing Steps:
22698 * - Determine the available positions in the subsets.
22699 * - Allocate RB in the available subset.
22700 * - Update RA Type1, RA type 0 and RA type 2 masks.
22702 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22703 * @param[in] U8 rbsReq
22704 * @param[in] RgSchBwRbgInfo *rbgInfo
22705 * @param[in] U8 startRbgSubset
22706 * @param[in] U8 *allocRbgSubset
22707 * @param[out] rgSchDlSfAllocInfo *resAllocInfo
22708 * @param[in] Bool isPartialAlloc
22711 * Number of allocated RBs
22715 PUBLIC U8 rgSCHCmnDlRaType1Alloc
22717 RgSchDlSfAllocInfo *allocedInfo,
22719 RgSchBwRbgInfo *rbgInfo,
22721 U8 *allocRbgSubset,
22722 RgSchDlSfAllocInfo *resAllocInfo,
22723 Bool isPartialAlloc
22726 PUBLIC U8 rgSCHCmnDlRaType1Alloc(allocedInfo, rbsReq,rbgInfo,startRbgSubset,
22727 allocRbgSubset, resAllocInfo, isPartialAlloc)
22728 RgSchDlSfAllocInfo *allocedInfo;
22730 RgSchBwRbgInfo *rbgInfo;
22732 U8 *allocRbgSubset;
22733 RgSchDlSfAllocInfo *resAllocInfo;
22734 Bool isPartialAlloc;
22737 /* Note: This function attempts only full allocation */
22738 U8 *rbgSubsetSzArr;
22739 U8 type2MaskIdx, subsetIdx, rbIdx, rbInSubset, rbgInSubset;
22740 U8 offset, rbg, maskSize, bestSubsetIdx;
22742 U8 bestNumAvailRbs = 0;
22743 U8 numAllocRbs = 0;
22744 U32 ueRaType2Mask, ueRaType0Mask, rbPosInSubset;
22745 U32 remNumRbs, allocedMask;
22747 U8 rbgSize = rbgInfo->rbgSize;
22748 U8 rbgSubset = startRbgSubset;
22749 U32 *rbgMask = &resAllocInfo->raType0Mask;
22750 U32 *raType1Mask = resAllocInfo->raType1Mask;
22751 U32 *raType2Mask = resAllocInfo->raType2Mask;
22752 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22753 U32 *allocMask = allocedInfo->raType1Mask;
22755 /* Initialize the subset size Array */
22756 rbgSubsetSzArr = rbgInfo->rbgSubsetSize;
22758 /* Perform allocation for RA type 1 */
22759 for (subsetIdx = 0;subsetIdx < rbgSize; ++subsetIdx)
22761 allocedMask = allocMask[rbgSubset];
22762 maskSize = rbgSubsetSzArr[rbgSubset];
22764 /* Determine number of available RBs in the subset */
22765 usedRbs = allocedInfo->raType1UsedRbs[subsetIdx];
22766 remNumRbs = maskSize - usedRbs;
22768 if (remNumRbs >= rbsReq)
22770 bestNumAvailRbs = rbsReq;
22771 bestSubsetIdx = rbgSubset;
22774 else if (isPartialAlloc && (remNumRbs > bestNumAvailRbs))
22776 bestNumAvailRbs = remNumRbs;
22777 bestSubsetIdx = rbgSubset;
22780 rbgSubset = (rbgSubset + 1) % rbgSize;
22781 } /* End of for (each rbgsubset) */
22783 if (bestNumAvailRbs)
22785 /* Initialize alloced mask and subsetSize depending on the RBG
22786 * subset of allocation */
22788 maskSize = rbgSubsetSzArr[bestSubsetIdx];
22789 allocedMask = allocMask[bestSubsetIdx];
22790 RG_SCH_CMN_DL_GET_START_POS(allocedMask, maskSize,
22792 for (; startIdx < rbgSize; ++startIdx, ++startPos)
22794 for (rbInSubset = startPos; rbInSubset < maskSize;
22795 rbInSubset = rbInSubset + rbgSize)
22797 rbPosInSubset = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbInSubset);
22798 if (!(allocedMask & rbPosInSubset))
22800 raType1Mask[bestSubsetIdx] |= rbPosInSubset;
22801 raType1UsedRbs[bestSubsetIdx]++;
22803 /* Compute RB index value for the RB being allocated */
22804 rbgInSubset = rbInSubset /rbgSize;
22805 offset = rbInSubset % rbgSize;
22806 rbg = (rbgInSubset * rbgSize) + bestSubsetIdx;
22807 rbIdx = (rbg * rbgSize) + offset;
22809 /* Update RBG mask for RA type 0 allocation */
22810 ueRaType0Mask = rgSCHCmnGetRaType0Mask(rbIdx, rbgSize);
22811 *rbgMask |= ueRaType0Mask;
22813 /* Update RA type 2 mask */
22814 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22815 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22817 /* Update the counters */
22820 if (numAllocRbs == bestNumAvailRbs)
22825 } /* End of for (each position in the subset mask) */
22826 if (numAllocRbs == bestNumAvailRbs)
22830 } /* End of for startIdx = 0 to rbgSize */
22832 *allocRbgSubset = bestSubsetIdx;
22833 } /* End of if (bestNumAvailRbs) */
22835 RETVALUE(numAllocRbs);
22839 * @brief Handles RB allocation for Resource allocation type 2
22843 * Function : rgSCHCmnDlRaType2Alloc
22845 * Invoking Module Processing:
22846 * - This function is invoked for DL RB allocation for resource allocation
22849 * Processing Steps:
22850 * - Determine the available positions in the mask
22851 * - Allocate best-fit consecutive RBs.
22852 * - Update RA Type2, RA type 1 and RA type 0 masks.
22854 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22855 * @param[in] U8 rbsReq
22856 * @param[in] RgSchBwRbgInfo *rbgInfo
22857 * @param[out] U8 *rbStart
22858 * @param[out] rgSchDlSfAllocInfo *resAllocInfo
22859 * @param[in] Bool isPartialAlloc
22862 * Number of allocated RBs
22866 PUBLIC U8 rgSCHCmnDlRaType2Alloc
22868 RgSchDlSfAllocInfo *allocedInfo,
22870 RgSchBwRbgInfo *rbgInfo,
22872 RgSchDlSfAllocInfo *resAllocInfo,
22873 Bool isPartialAlloc
22876 PUBLIC U8 rgSCHCmnDlRaType2Alloc(allocedInfo, rbsReq, rbgInfo, rbStart,
22877 resAllocInfo, isPartialAlloc)
22878 RgSchDlSfAllocInfo *allocedInfo;
22880 RgSchBwRbgInfo *rbgInfo;
22882 RgSchDlSfAllocInfo *resAllocInfo;
22883 Bool isPartialAlloc;
22886 U8 numAllocRbs = 0;
22888 U8 rbgSize = rbgInfo->rbgSize;
22889 U32 *rbgMask = &resAllocInfo->raType0Mask;
22890 #ifdef RGSCH_SPS_UNUSED
22891 U32 *raType1Mask = resAllocInfo->raType1Mask;
22893 U32 *raType2Mask = resAllocInfo->raType2Mask;
22894 #ifdef RGSCH_SPS_UNUSED
22895 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22897 U32 *allocedMask = allocedInfo->raType2Mask;
22899 /* Note: This function attempts only full allocation */
22900 rgSCHCmnDlGetBestFitHole(allocedMask, rbgInfo->numRbs,
22901 raType2Mask, rbsReq, rbStart, &numAllocRbs, isPartialAlloc);
22904 /* Update the allocation in RA type 0 and RA type 1 masks */
22905 U8 rbCnt = numAllocRbs;
22906 #ifdef RGSCH_SPS_UNUSED
22915 /* Update RBG mask for RA type 0 allocation */
22916 ueRaType0Mask = rgSCHCmnGetRaType0Mask(rbIdx, rbgSize);
22917 *rbgMask |= ueRaType0Mask;
22919 #ifdef RGSCH_SPS_UNUSED
22920 /* Update RBG mask for RA type 1 */
22921 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22922 raType1Mask[rbgSubset] |= ueRaType1Mask;
22923 raType1UsedRbs[rbgSubset]++;
22925 /* Update the counters */
22931 RETVALUE(numAllocRbs);
22935 * @brief Determines RA type 0 mask from given RB index.
22939 * Function : rgSCHCmnGetRaType0Mask
22942 * Processing Steps:
22943 * - Determine RA Type 0 mask for given rbIdx and RBG size.
22945 * @param[in] U8 rbIdx
22946 * @param[in] U8 rbgSize
22947 * @return U32 RA type 0 mask
22950 PRIVATE U32 rgSCHCmnGetRaType0Mask
22956 PRIVATE U32 rgSCHCmnGetRaType0Mask(rbIdx, rbgSize)
22962 U32 rbgPosInRbgMask = 0;
22964 rbg = rbIdx/rbgSize;
22965 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22967 RETVALUE(rbgPosInRbgMask);
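/* Example (assumed values): rbIdx = 13 with rbgSize = 4 lies in
 * RBG 13 / 4 = 3, so the returned mask has only the bit for RBG 3 set
 * (at the position given by RG_SCH_CMN_DL_GET_POS_FRM_LSB(3)). */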
22970 #ifdef RGSCH_SPS_UNUSED
22972 * @brief Determines RA type 1 mask from given RB index.
22976 * Function : rgSCHCmnGetRaType1Mask
22979 * Processing Steps:
22980 * - Determine RA Type 1 mask for given rbIdx and RBG size.
22982 * @param[in] U8 rbIdx
22983 * @param[in] U8 rbgSize
22984 * @param[out] U8 *type1Subset
22985 * @return U32 RA type 1 mask
22988 PRIVATE U32 rgSCHCmnGetRaType1Mask
22995 PRIVATE U32 rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, type1Subset)
23001 U8 rbg, rbgSubset, rbgInSubset, offset, rbInSubset;
23004 rbg = rbIdx/rbgSize;
23005 rbgSubset = rbg % rbgSize;
23006 rbgInSubset = rbg/rbgSize;
23007 offset = rbIdx % rbgSize;
23008 rbInSubset = rbgInSubset * rbgSize + offset;
23009 rbPosInSubset = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbInSubset);
23011 *type1Subset = rbgSubset;
23012 RETVALUE(rbPosInSubset);
23014 #endif /* RGSCH_SPS_UNUSED */
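/* Example (assumed values) of the RA type 1 mapping above: rbIdx = 13 with
 * rbgSize = 4 gives rbg = 3, rbgSubset = 3 % 4 = 3, rbgInSubset = 3 / 4 = 0,
 * offset = 13 % 4 = 1 and rbInSubset = 0 * 4 + 1 = 1; the function reports
 * subset 3 and a mask with the bit for position 1 set. */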
23016 * @brief Determines RA type 2 mask from given RB index.
23020 * Function : rgSCHCmnGetRaType2Mask
23023 * Processing Steps:
23024 * - Determine RA Type 2 mask for given rbIdx and rbg size.
23026 * @param[in] U8 rbIdx
23027 * @param[out] U8 *maskIdx
23028 * @return U32 RA type 2 mask
23031 PRIVATE U32 rgSCHCmnGetRaType2Mask
23037 PRIVATE U32 rgSCHCmnGetRaType2Mask(rbIdx, maskIdx)
23044 *maskIdx = rbIdx / 32;
23045 rbPosInType2 = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbIdx % 32);
23047 RETVALUE(rbPosInType2);
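/* Example (assumed value): rbIdx = 37 yields *maskIdx = 37 / 32 = 1 and a
 * mask with the bit for position 37 % 32 = 5 set, i.e. RB 37 maps to bit 5
 * of the second 32-bit word of the RA type 2 mask. */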
23051 * @brief Performs resource allocation for a non-SPS UE in SPS bandwidth
23055 * Function : rgSCHCmnAllocUeInSpsBw
23058 * Processing Steps:
23059 * - Determine allocation for the UE.
23060 * - Use resource allocation type 0, 1 and 2 for allocation
23061 * within maximum SPS bandwidth.
23063 * @param[in] RgSchDlSf *dlSf
23064 * @param[in] RgSchCellCb *cell
23065 * @param[in] RgSchUeCb *ue
23066 * @param[in] RgSchDlRbAlloc *rbAllocInfo
23067 * @param[in] Bool isPartialAlloc
23073 PUBLIC Bool rgSCHCmnAllocUeInSpsBw
23078 RgSchDlRbAlloc *rbAllocInfo,
23079 Bool isPartialAlloc
23082 PUBLIC Bool rgSCHCmnAllocUeInSpsBw(dlSf, cell, ue, rbAllocInfo, isPartialAlloc)
23086 RgSchDlRbAlloc *rbAllocInfo;
23087 Bool isPartialAlloc;
23090 U8 rbgSize = cell->rbgSize;
23091 U8 numAllocRbs = 0;
23092 U8 numAllocRbgs = 0;
23094 U8 idx, noLyr, iTbs;
23095 RgSchCmnDlUe *dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
23096 RgSchDlSfAllocInfo *dlSfAlloc = &rbAllocInfo->dlSf->dlSfAllocInfo;
23097 RgSchBwRbgInfo *spsRbgInfo = &cell->spsBwRbgInfo;
23099 /* SPS_FIX : Check if this Hq proc is scheduled */
23100 if ((0 == rbAllocInfo->tbInfo[0].schdlngForTb) &&
23101 (0 == rbAllocInfo->tbInfo[1].schdlngForTb))
23106 /* Check if the requirement can be accommodated in SPS BW */
23107 if (dlSf->spsAllocdBw == spsRbgInfo->numRbs)
23109 /* SPS Bandwidth has been exhausted: no further allocations possible */
23112 if (!isPartialAlloc)
23114 if((dlSf->spsAllocdBw + rbAllocInfo->rbsReq) > spsRbgInfo->numRbs)
23120 /* Perform allocation for RA type 0 if rbsReq is multiple of RBG size (also
23121 * if RBG size = 1) */
23122 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23124 rbAllocInfo->rbsReq += (rbgSize - rbAllocInfo->rbsReq % rbgSize);
23125 numAllocRbgs = rgSCHCmnDlRaType0Alloc(dlSfAlloc,
23126 rbAllocInfo->rbsReq, spsRbgInfo, &numAllocRbs,
23127 &rbAllocInfo->resAllocInfo, isPartialAlloc);
23129 #ifdef RGSCH_SPS_UNUSED
23130 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE1)
23132 /* If no RBS could be allocated, attempt RA TYPE 1 */
23134 numAllocRbs = rgSCHCmnDlRaType1Alloc(dlSfAlloc,
23135 rbAllocInfo->rbsReq, spsRbgInfo, (U8)dlSfAlloc->nxtRbgSubset,
23136 &rbAllocInfo->allocInfo.raType1.rbgSubset,
23137 &rbAllocInfo->resAllocInfo, isPartialAlloc);
23141 dlSfAlloc->nxtRbgSubset =
23142 (rbAllocInfo->allocInfo.raType1.rbgSubset + 1 ) % rbgSize;
23146 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23148 numAllocRbs = rgSCHCmnDlRaType2Alloc(dlSfAlloc,
23149 rbAllocInfo->rbsReq, spsRbgInfo,
23150 &rbStart, &rbAllocInfo->resAllocInfo, isPartialAlloc);
23157 if (!(rbAllocInfo->pdcch =
23158 rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi,\
23159 rbAllocInfo->dciFormat, FALSE)))
23161 /* Note: Returning TRUE since PDCCH might be available for another UE */
23165 /* Update Tb info for each scheduled TB */
23166 iTbs = rbAllocInfo->tbInfo[0].iTbs;
23167 noLyr = rbAllocInfo->tbInfo[0].noLyr;
23168 rbAllocInfo->tbInfo[0].bytesAlloc =
23169 rgTbSzTbl[noLyr - 1][iTbs][numAllocRbs - 1]/8;
23171 if (rbAllocInfo->tbInfo[1].schdlngForTb)
23173 iTbs = rbAllocInfo->tbInfo[1].iTbs;
23174 noLyr = rbAllocInfo->tbInfo[1].noLyr;
23175 rbAllocInfo->tbInfo[1].bytesAlloc =
23176 rgTbSzTbl[noLyr - 1][iTbs][numAllocRbs - 1]/8;;
23179 /* Update rbAllocInfo with the allocation information */
23180 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23182 rbAllocInfo->allocInfo.raType0.dlAllocBitMask =
23183 rbAllocInfo->resAllocInfo.raType0Mask;
23184 rbAllocInfo->allocInfo.raType0.numDlAlloc = numAllocRbgs;
23186 #ifdef RGSCH_SPS_UNUSED
23187 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE1)
23189 rbAllocInfo->allocInfo.raType1.dlAllocBitMask =
23190 rbAllocInfo->resAllocInfo.raType1Mask[rbAllocInfo->allocInfo.raType1.rbgSubset];
23191 rbAllocInfo->allocInfo.raType1.numDlAlloc = numAllocRbs;
23192 rbAllocInfo->allocInfo.raType1.shift = 0;
23195 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23197 rbAllocInfo->allocInfo.raType2.isLocal = TRUE;
23198 rbAllocInfo->allocInfo.raType2.rbStart = rbStart;
23199 rbAllocInfo->allocInfo.raType2.numRb = numAllocRbs;
23202 rbAllocInfo->rbsAlloc = numAllocRbs;
23203 rbAllocInfo->tbInfo[0].schdlngForTb = TRUE;
23205 /* Update allocation masks for RA types 0, 1 and 2 in DL SF */
23207 /* Update type 0 allocation mask */
23208 dlSfAlloc->raType0Mask |= rbAllocInfo->resAllocInfo.raType0Mask;
23209 #ifdef RGSCH_SPS_UNUSED
23210 /* Update type 1 allocation masks */
23211 for (idx = 0; idx < RG_SCH_NUM_RATYPE1_32BIT_MASK; ++idx)
23213 dlSfAlloc->raType1Mask[idx] |= rbAllocInfo->resAllocInfo.raType1Mask[idx];
23214 dlSfAlloc->raType1UsedRbs[idx] +=
23215 rbAllocInfo->resAllocInfo.raType1UsedRbs[idx];
23218 /* Update type 2 allocation masks */
23219 for (idx = 0; idx < RG_SCH_NUM_RATYPE2_32BIT_MASK; ++idx)
23221 dlSfAlloc->raType2Mask[idx] |= rbAllocInfo->resAllocInfo.raType2Mask[idx];
23224 dlSf->spsAllocdBw += numAllocRbs;
23228 /***********************************************************
23230 * Func : rgSCHCmnDlGetBestFitHole
23233 * Desc : Converts the best fit hole into allocation and returns the
23234 * allocation information.
23244 **********************************************************/
23246 PRIVATE Void rgSCHCmnDlGetBestFitHole
23250 U32 *crntAllocMask,
23254 Bool isPartialAlloc
23257 PRIVATE Void rgSCHCmnDlGetBestFitHole (allocMask, numMaskRbs,
23258 crntAllocMask, rbsReq, allocStart, allocNumRbs, isPartialAlloc)
23261 U32 *crntAllocMask;
23265 Bool isPartialAlloc;
23268 U8 maskSz = (numMaskRbs + 31)/32;
23269 U8 maxMaskPos = (numMaskRbs % 32);
23270 U8 maskIdx, maskPos;
23271 U8 numAvailRbs = 0;
23272 U8 bestAvailNumRbs = 0;
23273 S8 bestStartPos = -1;
23275 U32 tmpMask[RG_SCH_NUM_RATYPE2_32BIT_MASK] = {0};
23276 U32 bestMask[RG_SCH_NUM_RATYPE2_32BIT_MASK] = {0};
23278 *allocNumRbs = numAvailRbs;
23281 for (maskIdx = 0; maskIdx < maskSz; ++maskIdx)
23284 if (maskIdx == (maskSz - 1))
23286 if (numMaskRbs % 32)
23288 maxMaskPos = numMaskRbs % 32;
23291 for (maskPos = 0; maskPos < maxMaskPos; ++maskPos)
23293 if (!(allocMask[maskIdx] & (1 << (31 - maskPos))))
23295 tmpMask[maskIdx] |= (1 << (31 - maskPos));
23296 if (startPos == -1)
23298 startPos = maskIdx * 32 + maskPos;
23301 if (numAvailRbs == rbsReq)
23303 *allocStart = (U8)startPos;
23304 *allocNumRbs = rbsReq;
23310 if (numAvailRbs > bestAvailNumRbs)
23312 bestAvailNumRbs = numAvailRbs;
23313 bestStartPos = startPos;
23314 cmMemcpy((U8 *)bestMask, (U8 *) tmpMask, 4 * sizeof(U32));
23318 cmMemset((U8 *)tmpMask, 0, 4 * sizeof(U32));
23321 if (*allocNumRbs == rbsReq)
23327 if (*allocNumRbs == rbsReq)
23329 /* Convert the hole into allocation */
23330 cmMemcpy((U8 *)crntAllocMask, (U8 *) tmpMask, 4 * sizeof(U32));
23335 if (bestAvailNumRbs && isPartialAlloc)
23337 /* Partial allocation could have been done */
23338 *allocStart = (U8)bestStartPos;
23339 *allocNumRbs = bestAvailNumRbs;
23340 /* Convert the hole into allocation */
23341 cmMemcpy((U8 *)crntAllocMask, (U8 *) bestMask, 4 * sizeof(U32));
23347 #endif /* LTEMAC_SPS */
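/* Example (assumed values) of the best-fit search above: if the RA type 2
 * mask leaves RBs 10..14 and 20..30 free and rbsReq = 8, the first hole
 * (5 RBs) is too small, so the search continues and returns
 * *allocStart = 20, *allocNumRbs = 8 from the second hole. With
 * isPartialAlloc = TRUE and rbsReq = 15, no hole satisfies the request and
 * the largest hole found (20..30, 11 RBs) is returned instead. */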
23349 /***************************************************************************
23351 * NON-DLFS Allocation functions
23353 * *************************************************************************/
23357 * @brief Function to find out code rate
23361 * Function : rgSCHCmnFindCodeRate
23363 * Processing Steps:
23365 * @param[in] RgSchCellCb *cell
23366 * @param[in] RgSchDlSf *dlSf
23367 * @param[in,out] RgSchDlRbAlloc *allocInfo
23372 PRIVATE Void rgSCHCmnFindCodeRate
23376 RgSchDlRbAlloc *allocInfo,
23380 PRIVATE Void rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,idx)
23383 RgSchDlRbAlloc *allocInfo;
23392 /* Adjust the iMcs and the allocated bytes with respect to the adjusted
23393 RBs - here the iMcs is found by identifying the highest TB size that
23394 does not exceed the originally allocated bytes. */
23396 * @brief Adjust IMCS according to tbSize and ITBS
23400 * Function : rgSCHCmnNonDlfsPbchTbImcsAdj
23402 * Processing Steps:
23403 * - Adjust Imcs according to tbSize and ITBS.
23405 * @param[in,out] RgSchDlRbAlloc *allocInfo
23406 * @param[in] U8 *idx
23410 PRIVATE Void rgSCHCmnNonDlfsPbchTbImcsAdj
23413 RgSchDlRbAlloc *allocInfo,
23418 PRIVATE Void rgSCHCmnNonDlfsPbchTbImcsAdj(cell,allocInfo, idx, rbsReq)
23420 RgSchDlRbAlloc *allocInfo;
23430 RgSchDlSf *dlSf = allocInfo->dlSf;
23432 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[idx].imcs, tbs);
23433 noLyrs = allocInfo->tbInfo[idx].noLyr;
23435 if((allocInfo->raType == RG_SCH_CMN_RA_TYPE0))
23437 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + dlSf->lstRbgDfct), cell->rbgSize);
23438 noRbs = (noRbgs * cell->rbgSize) - dlSf->lstRbgDfct;
23442 noRbs = allocInfo->rbsReq;
23445 /* This check helps when tbs is zero and a reduction in MCS is not possible */
23446 if (allocInfo->rbsReq == 0 )
23450 origBytesReq = rgTbSzTbl[noLyrs - 1][tbs][rbsReq - 1]/8;
23452 /* Find the iTbs & iMcs by identifying the highest TB size
23453 that does not exceed the original bytes allocated. */
23456 if(((rgTbSzTbl[noLyrs - 1][0][noRbs - 1])/8) < origBytesReq)
23458 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[noLyrs - 1], tbs);
23459 while(((rgTbSzTbl[noLyrs - 1][tbs][noRbs - 1])/8) > origBytesReq)
23468 allocInfo->tbInfo[idx].bytesReq = rgTbSzTbl[noLyrs - 1][tbs][noRbs - 1]/8;
23469 allocInfo->tbInfo[idx].iTbs = tbs;
23470 RG_SCH_CMN_DL_TBS_TO_MCS(tbs,allocInfo->tbInfo[idx].imcs);
23475 /* Added function to adjust the TB size */
23477 * @brief Function to adjust the TB size in case of subframes 0 & 5 when
23478 * we were not able to adjust the RB allocation by adding the extra required RBs
23482 * Function : rgSCHCmnNonDlfsPbchTbSizeAdj
23484 * Processing Steps:
23486 * @param[in,out] RgSchDlRbAlloc *allocInfo
23487 * @param[in] U8 numOvrlapgPbchRb
23488 * @param[in] U8 idx
23489 * @param[in] U8 pbchSsRsSym
23493 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj
23495 RgSchDlRbAlloc *allocInfo,
23496 U8 numOvrlapgPbchRb,
23502 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,idx,bytesReq)
23503 RgSchDlRbAlloc *allocInfo;
23504 U8 numOvrlapgPbchRb;
23510 U32 reducedTbs = 0;
23514 noLyrs = allocInfo->tbInfo[idx].noLyr;
23516 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[idx].imcs, tbs);
23518 reducedTbs = bytesReq - (((U32)numOvrlapgPbchRb * (U32)pbchSsRsSym * 6)/8);
23520 /* Find the iTbs & iMcs by identifying the highest TB size that does
23521 not exceed the reduced number of bits, accounting for the bits
23522 reserved for PBCH/PSS/SSS */
23523 if(((rgTbSzTbl[noLyrs - 1][0][allocInfo->rbsReq - 1])/8) < reducedTbs)
23525 while(((rgTbSzTbl[noLyrs - 1][tbs][allocInfo->rbsReq - 1])/8) > reducedTbs)
23534 allocInfo->tbInfo[idx].bytesReq = rgTbSzTbl[noLyrs - 1][tbs][allocInfo->rbsReq - 1]/8;
23535 allocInfo->tbInfo[idx].iTbs = tbs;
23536 RG_SCH_CMN_DL_TBS_TO_MCS(tbs,allocInfo->tbInfo[idx].imcs);
23541 /* Added this function to find the number of additional RBs available */
23543 * @brief Function to find out how many additional RBs are available
23544 * in the entire BW which can be allocated to a UE
23547 * Function : rgSCHCmnFindNumAddtlRbsAvl
23549 * Processing Steps:
23550 * - Calculates the number of additional RBs available
23552 * @param[in] RgSchCellCb *cell
23553 * @param[in] RgSchDlSf *dlSf
23554 * @param[in,out] RgSchDlRbAlloc *allocInfo
23555 * @param[out] U8 addtlRbsAvl
23559 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl
23563 RgSchDlRbAlloc *allocInfo
23566 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl(cell,dlSf,allocInfo)
23569 RgSchDlRbAlloc *allocInfo;
23572 U8 addtlRbsAvl = 0;
23574 TRC2(rgSCHCmnFindNumAddtlRbsAvl)
23576 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23578 addtlRbsAvl = (((dlSf->type0End - dlSf->type2End + 1)*\
23579 cell->rbgSize) - dlSf->lstRbgDfct) - allocInfo->rbsReq;
23581 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23583 addtlRbsAvl = (dlSf->bw - dlSf->bwAlloced) - allocInfo->rbsReq;
23586 RETVALUE(addtlRbsAvl);
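/* Example (assumed values) for the RA type 2 branch above: with
 * dlSf->bw = 50, dlSf->bwAlloced = 30 and allocInfo->rbsReq = 12,
 * addtlRbsAvl = (50 - 30) - 12 = 8, i.e. 8 more RBs could still be granted
 * beyond the current request. */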
23589 /* Added this function to find the number of overlapping PBCH RBs */
23591 * @brief Function to find out how many of the requested RBs are
23592 * falling in the center 6 RBs of the downlink bandwidth.
23595 * Function : rgSCHCmnFindNumPbchOvrlapRbs
23597 * Processing Steps:
23598 * - Calculates number of overlapping rbs
23600 * @param[in] RgSchCellCb *cell
23601 * @param[in] RgSchDlSf *dlSf
23602 * @param[in,out] RgSchDlRbAlloc *allocInfo
23603 * @param[out] U8* numOvrlapgPbchRb
23607 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs
23611 RgSchDlRbAlloc *allocInfo,
23612 U8 *numOvrlapgPbchRb
23615 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,numOvrlapgPbchRb)
23618 RgSchDlRbAlloc *allocInfo;
23619 U8 *numOvrlapgPbchRb;
23622 *numOvrlapgPbchRb = 0;
23623 TRC2(rgSCHCmnFindNumPbchOvrlapRbs)
23624 /* Find whether we have already crossed the start boundary of the PBCH 6 RBs;
23625 * if so, find the number of RBs of this allocation that would overlap
23626 * with them. */
23627 if(dlSf->bwAlloced <= (cell->pbchRbStart))
23629 /* We have not crossed the start boundary of the PBCH RBs. Now we need
23630 * to know, if we take this allocation, how many PBCH RBs
23631 * would overlap with it. */
23632 /* Find out the overlapping RBs in the centre 6 RBs */
23633 if((dlSf->bwAlloced + allocInfo->rbsReq) > cell->pbchRbStart)
23635 *numOvrlapgPbchRb = (dlSf->bwAlloced + allocInfo->rbsReq) - (cell->pbchRbStart);
23636 if(*numOvrlapgPbchRb > 6)
23637 *numOvrlapgPbchRb = 6;
23640 else if ((dlSf->bwAlloced > (cell->pbchRbStart)) &&
23641 (dlSf->bwAlloced < (cell->pbchRbEnd)))
23643 /* We have already crossed the start boundary of the PBCH RBs. We need to
23644 * find, if we take this allocation, how many of its RBs
23645 * will overlap with the PBCH RBs. */
23646 /* Find out the overlapping RBs in the centre 6 RBs */
23647 if(dlSf->bwAlloced + allocInfo->rbsReq < (cell->pbchRbEnd))
23649 /* Even if we take this allocation we do not cross the
23650 * end boundary of the PBCH 6 RBs. */
23651 *numOvrlapgPbchRb = allocInfo->rbsReq;
23655 /* If we take this allocation we cross the
23656 * end boundary of the PBCH 6 RBs. */
23657 *numOvrlapgPbchRb = (cell->pbchRbEnd) - dlSf->bwAlloced;
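/* Worked example (assumed values): with the centre 6-RB window starting at
 * pbchRbStart = 22, bwAlloced = 20 and rbsReq = 10, the first branch
 * applies (20 <= 22) and numOvrlapgPbchRb = (20 + 10) - 22 = 8, which is
 * then clipped to 6. */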
23664 * @brief Performs RB allocation adjustment if the requested RBs are
23665 * falling in the center 6 RBs of the downlink bandwidth.
23668 * Function : rgSCHCmnNonDlfsPbchRbAllocAdj
23670 * Processing Steps:
23671 * - Allocate consecutively available RBs.
23673 * @param[in] RgSchCellCb *cell
23674 * @param[in,out] RgSchDlRbAlloc *allocInfo
23675 * @param[in] U8 pbchSsRsSym
23679 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj
23682 RgSchDlRbAlloc *allocInfo,
23687 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj(cell, allocInfo,pbchSsRsSym)
23689 RgSchDlRbAlloc *allocInfo;
23694 RgSchDlSf *dlSf = allocInfo->dlSf;
23695 U8 numOvrlapgPbchRb = 0;
23696 U8 numOvrlapgAdtlPbchRb = 0;
23698 U8 addtlRbsReq = 0;
23699 U8 moreAddtlRbsReq = 0;
23700 U8 addtlRbsAdd = 0;
23701 U8 moreAddtlRbsAdd = 0;
23709 TRC2(rgSCHCmnNonDlfsPbchRbAllocAdj);
23712 origRbsReq = allocInfo->rbsReq;
23713 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23715 totSym = (cell->isCpDlExtend) ? RGSCH_TOT_NUM_SYM_EXTCP : RGSCH_TOT_NUM_SYM_NORCP;
23717 /* Additional RBs are allocated by considering the loss due to
23718 the reserved symbols for CFICH, PBCH, PSS, SSS and cell specific RS */
23720 divResult = (numOvrlapgPbchRb * pbchSsRsSym)/totSym;
23721 if((numOvrlapgPbchRb * pbchSsRsSym) % totSym)
23725 addtlRbsReq = divResult;
23727 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, addtlRbsReq, addtlRbsAdd)
23729 /* The RBs now required are the originally requested RBs plus these additional
23730 * RBs to make up for PSS/SSS/PBCH. */
23731 allocInfo->rbsReq = allocInfo->rbsReq + addtlRbsAdd;
23733 /* Check whether the additional RBs we have taken also fall
23734 * within the PBCH RB range; if so, we need to account for
23735 * PSS/SSS/PBCH for these additional RBs too. */
23736 if(addtlRbsAdd && ((dlSf->bwAlloced + allocInfo->rbsReq - addtlRbsAdd) < (cell->pbchRbEnd)))
23738 if((dlSf->bwAlloced + allocInfo->rbsReq) <= (cell->pbchRbEnd))
23740 /* With the additional RBs taken into account, we are not crossing the
23741 * PBCH RB end boundary. Thus here we need to account only for the
23742 * overlapping PBCH RBs for these additional RBs. */
23743 divResult = (addtlRbsAdd * pbchSsRsSym)/totSym;
23744 if((addtlRbsAdd * pbchSsRsSym) % totSym)
23749 moreAddtlRbsReq = divResult;
23751 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, moreAddtlRbsReq, moreAddtlRbsAdd)
23753 allocInfo->rbsReq = allocInfo->rbsReq + moreAddtlRbsAdd;
23758 /* Here we have crossed the PBCH RB end boundary, thus we need to take
23759 * into account the overlapping RBs for the additional RBs, which will be
23760 * a subset of addtlRbs. */
23761 numOvrlapgAdtlPbchRb = (cell->pbchRbEnd) - ((dlSf->bwAlloced + allocInfo->rbsReq) - addtlRbsAdd);
23763 divResult = (numOvrlapgAdtlPbchRb * pbchSsRsSym)/totSym;
23764 if((numOvrlapgAdtlPbchRb * pbchSsRsSym) % totSym)
23769 moreAddtlRbsReq = divResult;
23771 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, moreAddtlRbsReq, moreAddtlRbsAdd)
23773 allocInfo->rbsReq = allocInfo->rbsReq + moreAddtlRbsAdd;
23776 if (isBcchPcch == TRUE)
23781 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
23784 /* This might be the iMcs value 6 and NPrb = 1 case - not
23785 adjusting the RBs, the iMcs or the allocated bytes */
23786 allocInfo->rbsReq = allocInfo->rbsReq - addtlRbsAdd - moreAddtlRbsAdd;
23788 else if(tbs && ((0 == addtlRbsAdd) && (moreAddtlRbsAdd == 0)))
23790 /* When the entire bandwidth is already occupied
23791 * and we don't have room to add additional RBs, then in order to decrease the
23792 * code rate we reduce the TB size: the presently calculated
23793 * TB size is reduced by the number of bytes that PBCH/PSS/SSS would occupy in the
23794 * overlapping RBs, and the nearest TB size below this deduced value is chosen */
23796 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23798 noLyr = allocInfo->tbInfo[0].noLyr;
23799 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[noLyr - 1], tbs);
23800 bytesReq = rgTbSzTbl[noLyr - 1][tbs][allocInfo->rbsReq - 1]/8;
23802 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,0,bytesReq);
23804 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23806 noLyr = allocInfo->tbInfo[1].noLyr;
23807 bytesReq = rgTbSzTbl[noLyr - 1][tbs][allocInfo->rbsReq - 1]/8;
23808 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,1,bytesReq);
23812 else if(tbs && ((addtlRbsAdd != addtlRbsReq) ||
23813 (addtlRbsAdd && (moreAddtlRbsReq != moreAddtlRbsAdd))))
23815 /* When we were not able to add the required number of
23816 * additional RBs, we adjust the iMcs based on the originally requested RBs.
23817 * This compensates for the few extra RBs we have added, but in order
23818 * to compensate for the RBs we could not add we again do the TB size adjustment */
23820 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 0 , origRbsReq);
23822 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23824 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 1 , origRbsReq);
23827 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23828 numOvrlapgPbchRb = numOvrlapgPbchRb - (addtlRbsAdd + moreAddtlRbsAdd);
23830 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,0,allocInfo->tbInfo[0].bytesReq);
23832 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23834 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,1,allocInfo->tbInfo[1].bytesReq);
23840 /* We hit this code when we were able to add the required additional RBs,
23841 * hence we adjust the iMcs based on the originally requested RBs */
23843 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 0 , origRbsReq);
23845 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23847 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 1 , origRbsReq);
23852 } /* end of rgSCHCmnNonDlfsPbchRbAllocAdj */
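/* Illustration (all values assumed) of the additional-RB computation in
 * rgSCHCmnNonDlfsPbchRbAllocAdj above: with totSym = 168 REs per RB in the
 * subframe and pbchSsRsSym = 56 reserved REs per overlapping RB, an overlap
 * of 6 RBs loses 6 * 56 = 336 REs, so addtlRbsReq = ceil(336 / 168) = 2
 * extra RBs are requested to compensate for the loss. */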
23856 * @brief Performs RB allocation for frequency non-selective cell.
23860 * Function : rgSCHCmnNonDlfsCmnRbAlloc
23862 * Processing Steps:
23863 * - Allocate consecutively available RBs for BCCH/PCCH/RAR.
23865 * @param[in] RgSchCellCb *cell
23866 * @param[in, out] RgSchDlRbAlloc *allocInfo
23872 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc
23875 RgSchDlRbAlloc *allocInfo
23878 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc(cell, allocInfo)
23880 RgSchDlRbAlloc *allocInfo;
23886 U8 pbchSsRsSym = 0;
23889 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
23891 RgSchDlSf *dlSf = allocInfo->dlSf;
23894 U8 spsRbsAlloc = 0;
23895 RgSchDlSfAllocInfo *dlSfAlloc = &allocInfo->dlSf->dlSfAllocInfo;
23897 TRC2(rgSCHCmnNonDlfsCmnRbAlloc);
23899 allocInfo->tbInfo[0].noLyr = 1;
23902 /* Note: Initialize the masks to 0; this might not be needed since allocInfo
23903 * is initialized to 0 at the beginning of allocation */
23904 allocInfo->resAllocInfo.raType0Mask = 0;
23905 cmMemset((U8*)allocInfo->resAllocInfo.raType1Mask, 0,
23906 RG_SCH_NUM_RATYPE1_32BIT_MASK * sizeof (U32));
23907 cmMemset((U8*)allocInfo->resAllocInfo.raType2Mask, 0,
23908 RG_SCH_NUM_RATYPE2_32BIT_MASK * sizeof (U32));
23910 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
23911 (dlSf->bwAlloced == dlSf->bw))
23913 if(dlSf->bwAlloced == dlSf->bw)
23919 if (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced))
23922 if ((allocInfo->tbInfo[0].imcs < 29) && (dlSf->bwAlloced < dlSf->bw))
23924 if(allocInfo->tbInfo[0].imcs < 29)
23927 /* set the remaining RBs for the requested UE */
23928 allocInfo->rbsReq = dlSf->bw - dlSf->bwAlloced;
23929 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
23930 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[0][tbs][allocInfo->rbsReq - 1]/8;
23935 /* Attempt RA Type 2 allocation in SPS Bandwidth */
23936 if (dlSf->spsAllocdBw < cell->spsBwRbgInfo.numRbs)
23939 rgSCHCmnDlRaType2Alloc(dlSfAlloc,
23940 allocInfo->rbsReq, &cell->spsBwRbgInfo, &rbStart,
23941 &allocInfo->resAllocInfo, FALSE);
23942 /* rbsAlloc assignment moved from line 16671 to here to avoid
23943 * compilation error. Recheck */
23944 dlSf->spsAllocdBw += spsRbsAlloc;
23947 #endif /* LTEMAC_SPS */
23955 /* Update allocation information */
23956 allocInfo->pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
23957 if (allocInfo->pdcch == NULLP)
23961 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
23962 allocInfo->pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_1A];
23963 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
23964 allocInfo->allocInfo.raType2.isLocal = TRUE;
23968 allocInfo->allocInfo.raType2.rbStart = rbStart;
23969 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
23970 allocInfo->rbsAlloc = allocInfo->rbsReq;
23981 if(!(dlSf->sfNum == 5))
23983 /* case for subframes 1 to 9 except 5 */
23985 allocInfo->allocInfo.raType2.rbStart = rbStart;
23987 /*Fix for ccpu00123918*/
23988 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
23993 pbchFrame = 1; /* case for subframe 5 */
23994 /* In subframe 5, symbols are reserved for PSS, SSS, CFICH
23995 and Cell Specific Reference Signals */
23996 pbchSsRsSym = (((cellDl->currCfi) + RGSCH_NUM_PSS_SSS_SYM) *
23997 RGSCH_NUM_SC_IN_RB + cell->numCellRSPerSf);
24003 /* In subframe 0, symbols are reserved for PSS, SSS, PBCH, CFICH
24004 and Cell Specific Reference Signals */
24005 pbchSsRsSym = (((cellDl->currCfi) + RGSCH_NUM_PBCH_SYM +
24006 RGSCH_NUM_PSS_SSS_SYM) * RGSCH_NUM_SC_IN_RB +
24007 cell->numCellRSPerSf);
24008 } /* end of outer else */
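/* Rough worked example (constant values assumed for illustration:
 * RGSCH_NUM_PSS_SSS_SYM = 2, RGSCH_NUM_PBCH_SYM = 4, RGSCH_NUM_SC_IN_RB = 12):
 * with currCfi = 1, subframe 5 counts (1 + 2) * 12 = 36 REs per RB plus the
 * per-subframe cell-specific RS REs, while subframe 0 counts
 * (1 + 4 + 2) * 12 = 84 plus RS. This estimate of unusable REs feeds the
 * TB-size adjustment for allocations overlapping the PBCH region. */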
24011 (((dlSf->bwAlloced + allocInfo->rbsReq) - cell->pbchRbStart) > 0)&&
24012 (dlSf->bwAlloced < cell->pbchRbEnd))
24014 if(allocInfo->tbInfo[0].imcs < 29)
24016 rgSCHCmnNonDlfsPbchRbAllocAdj(cell, allocInfo, pbchSsRsSym, TRUE);
24028 /*Fix for ccpu00123918*/
24029 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
24030 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
24031 allocInfo->rbsAlloc = allocInfo->rbsReq;
24033 /* LTE_ADV_FLAG_REMOVED_START */
24035 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
24037 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, \
24038 allocInfo->allocInfo.raType2.rbStart, \
24039 allocInfo->allocInfo.raType2.numRb);
24044 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, \
24045 allocInfo->allocInfo.raType2.rbStart, \
24046 allocInfo->allocInfo.raType2.numRb);
24052 /* LTE_ADV_FLAG_REMOVED_END */
24053 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24060 /* Update type 0, 1 and 2 masks */
24061 dlSfAlloc->raType0Mask |= allocInfo->resAllocInfo.raType0Mask;
24062 #ifdef RGSCH_SPS_UNUSED
24063 for (idx = 0; idx < RG_SCH_NUM_RATYPE1_32BIT_MASK; ++idx)
24065 dlSfAlloc->raType1Mask[idx] |=
24066 allocInfo->resAllocInfo.raType1Mask[idx];
24067 dlSfAlloc->raType1UsedRbs[idx] +=
24068 allocInfo->resAllocInfo.raType1UsedRbs[idx];
24071 for (idx = 0; idx < RG_SCH_NUM_RATYPE2_32BIT_MASK; ++idx)
24073 dlSfAlloc->raType2Mask[idx] |=
24074 allocInfo->resAllocInfo.raType2Mask[idx];
24084 * @brief Performs RB allocation for frequency non-selective cell.
24088 * Function : rgSCHCmnNonDlfsCmnRbAllocRar
24090 * Processing Steps:
24091 * - Allocate consecutively available RBs for BCCH/PCCH/RAR.
24093 * @param[in] RgSchCellCb *cell
24094 * @param[in, out] RgSchDlRbAlloc *allocInfo
24100 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAllocRar
24103 RgSchDlRbAlloc *allocInfo
24106 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAllocRar(cell, allocInfo)
24108 RgSchDlRbAlloc *allocInfo;
24111 RgSchDlSf *dlSf = allocInfo->dlSf;
24112 TRC2(rgSCHCmnNonDlfsCmnRbAllocRar);
24115 if(dlSf->bwAlloced == dlSf->bw)
24120 allocInfo->tbInfo[0].noLyr = 1;
24122 /* Update allocation information */
24123 allocInfo->pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
24124 if (allocInfo->pdcch == NULLP)
24128 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
24129 allocInfo->pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_1A];
24130 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
24131 allocInfo->allocInfo.raType2.isLocal = TRUE;
24133 /*Fix for ccpu00123918*/
24134 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
24135 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
24136 allocInfo->rbsAlloc = allocInfo->rbsReq;
24138 /* LTE_ADV_FLAG_REMOVED_END */
24139 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24142 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, NULLP, dlSf, 13, TFU_DCI_FORMAT_B1, FALSE);
24143 if (allocInfo->pdcch == NULLP)
24147 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[0]);
24148 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
24150 printf("5GTF_ERROR vrbg allocated > 25\n");
24154 allocInfo->tbInfo[0].cmnGrnt.vrbgStart = beamInfo->vrbgStart;
24155 allocInfo->tbInfo[0].cmnGrnt.numVrbg = allocInfo->vrbgReq;
24157 /* Update allocation information */
24158 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
24160 allocInfo->tbInfo[0].cmnGrnt.xPDSCHRange = 1;
24161 allocInfo->tbInfo[0].cmnGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
24162 allocInfo->tbInfo[0].cmnGrnt.vrbgStart, allocInfo->tbInfo[0].cmnGrnt.numVrbg);
24164 allocInfo->tbInfo[0].cmnGrnt.rbStrt = (allocInfo->tbInfo[0].cmnGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
24165 allocInfo->tbInfo[0].cmnGrnt.numRb = (allocInfo->tbInfo[0].cmnGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
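/* Hedged illustration of the grant fields above (values assumed): with
 * vrbgStart = 3 and numVrbg = 2, rgSCHCmnCalcRiv() encodes the contiguous
 * VRBG range [3..4] as a single RIV against a total of MAX_5GTF_VRBG groups;
 * rbStrt and numRb then express the same range in PRBs by scaling with
 * MAX_5GTF_VRBG_SIZE. */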
24167 beamInfo->vrbgStart += allocInfo->tbInfo[0].cmnGrnt.numVrbg;
24168 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].cmnGrnt.numVrbg;
24169 allocInfo->tbInfo[0].cmnGrnt.rv = 0;
24170 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24173 printf("\n[%s],allocInfo->tbInfo[0].bytesAlloc:%u,vrbgReq:%u\n",
24174 __func__,allocInfo->tbInfo[0].bytesAlloc,allocInfo->vrbgReq);
24180 /* LTE_ADV_FLAG_REMOVED_START */
24183 * @brief To check if DL BW available for non-DLFS allocation.
24187 * Function : rgSCHCmnNonDlfsSFRBwAvlbl
24189 * Processing Steps:
24190 * - Determine availability based on RA Type.
24192 * @param[in] RgSchCellCb *cell
24193 * @param[in] RgSchDlSf *dlSf
24194 * @param[in] RgSchDlRbAlloc *allocInfo
24202 PRIVATE Bool rgSCHCmnNonDlfsSFRBwAvlbl
24205 RgSchSFRPoolInfo **sfrpoolInfo,
24207 RgSchDlRbAlloc *allocInfo,
24211 PRIVATE Bool rgSCHCmnNonDlfsSFRBwAvlbl(cell, sfrpoolInfo, dlSf, allocInfo, isUeCellEdge)
24213 RgSchSFRPoolInfo **sfrpoolInfo;
24215 RgSchDlRbAlloc *allocInfo;
24223 RgSchSFRPoolInfo *sfrPool;
24224 RgSchSFRPoolInfo *sfrCEPool;
24228 RgSchSFRPoolInfo *poolWithMaxAvlblBw = NULLP;
24230 U32 addtnlPRBs = 0;
24232 if (dlSf->bw <= dlSf->bwAlloced)
24234 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
24235 "BW is fully allocated for subframe (%d) CRNTI:%d", dlSf->sfNum,allocInfo->rnti);
24239 if (dlSf->sfrTotalPoolInfo.ccBwFull == TRUE)
24241 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
24242 "BW is fully allocated for CC Pool CRNTI:%d",allocInfo->rnti);
24246 if ((dlSf->sfrTotalPoolInfo.ceBwFull == TRUE) && (isUeCellEdge))
24248 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
24249 "BW is fully allocated for CE Pool CRNTI:%d",allocInfo->rnti);
24253 /* We first check whether the scheduled UE is cell edge or cell centre and accordingly check the available
24254 bandwidth in its pool. If the cell centre UE does not have BW available in its pool, it will check
24255 BW availability in the cell edge pool, but the other way around is NOT possible. */
24258 l = &dlSf->sfrTotalPoolInfo.cePool;
24262 l = &dlSf->sfrTotalPoolInfo.ccPool;
24265 n = cmLListFirst(l);
24269 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
24271 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24273 /* MS_FIX for ccpu00123919 : Number of RBs in case of RETX should be same as that of initial transmission. */
24274 if(allocInfo->tbInfo[0].tbCb->txCntr)
24276 /* RB assignment is being done for RETX. If reqRbs is a multiple of rbgSize then ignore lstRbgDfct. If reqRbs is
24277 * not a multiple of rbgSize then check if lstRbgDfct exists */
24278 if (allocInfo->rbsReq % cell->rbgSize == 0)
24280 if ((sfrPool->type2End == dlSf->type2End) && dlSf->lstRbgDfct)
24282 /* In this scenario we are wasting the last RBG for this dlSf */
24283 sfrPool->type0End--;
24284 sfrPool->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24286 dlSf->lstRbgDfct = 0;
24288 /*ABHINAV To check if these variables need to be taken care of*/
24290 dlSf->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24295 if (dlSf->lstRbgDfct)
24297 /* Check if type0 allocation can cater to this RETX requirement */
24298 if ((allocInfo->rbsReq % cell->rbgSize) != (cell->rbgSize - dlSf->lstRbgDfct))
24304 if (sfrPool->type2End != dlSf->type2End) /*Search again for some pool which has the END RBG of the BandWidth*/
24312 /* cannot allocate same number of required RBs */
24318 /*rg002.301 ccpu00120391 MOD condition is modified appropriately to find if rbsReq is less than the available RBs*/
24319 if(allocInfo->rbsReq <= (((sfrPool->type0End - sfrPool->type2End + 1)*\
24320 cell->rbgSize) - dlSf->lstRbgDfct))
24322 *sfrpoolInfo = sfrPool;
24327 if (sfrPool->bw <= sfrPool->bwAlloced + cell->rbgSize)
24329 n = cmLListNext(l);
24330 /* If the UE is cell centre then it simply checks the bandwidth available in the next pool.
24331 But if there are no more pools available, the cell centre UE will look for bandwidth in the cell edge pool */
24333 if((!isUeCellEdge) && (!n->node))
24335 l = &dlSf->sfrTotalPoolInfo.cePool;
24336 n = cmLListFirst(l);
24342 /* MS_FIX: Number of RBs in case of RETX should be same as that of initial transmission */
24343 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24345 /*rg002.301 ccpu00120391 MOD setting the remaining RBs for the requested UE*/
24346 allocInfo->rbsReq = (((sfrPool->type0End - sfrPool->type2End + 1)*\
24347 cell->rbgSize) - dlSf->lstRbgDfct);
24348 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24349 noLyrs = allocInfo->tbInfo[0].noLyr;
24350 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24351 *sfrpoolInfo = sfrPool;
24356 n = cmLListNext(l);
24358 /* If the UE is cell centre then it simply checks the bandwidth available in the next pool.
24359 But if there are no more pools available, the cell centre UE will look for bandwidth in the cell edge pool */
24360 if((!isUeCellEdge) && (!n->node))
24362 l = &dlSf->sfrTotalPoolInfo.cePool;
24363 n = cmLListFirst(l);
24369 // RETVALUE(FALSE);
24372 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
24374 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24375 /* This is a case where a UE was CC and had more RBs allocated than are present in the CE pool.
24376 If this UE becomes CE while a retx is ongoing, the CE BW is not sufficient for the retx */
24377 if ((isUeCellEdge) &&
24378 (allocInfo->tbInfo[0].tbCb->txCntr != 0))
24380 if(allocInfo->rbsReq > (sfrPool->bw - sfrPool->bwAlloced))
24382 /* Adjust CE BW such that Retx alloc is successful */
24383 /* Check if merging CE with adjacent CC pool will be sufficient to process Retx */
24385 /* If no Type 0 allocations are made from this pool */
24386 if (sfrPool->type0End == (((sfrPool->poolendRB + 1) / cell->rbgSize) - 1))
24388 if (sfrPool->adjCCPool &&
24389 (sfrPool->adjCCPool->type2Start == sfrPool->poolendRB + 1) &&
24390 (allocInfo->rbsReq <= ((sfrPool->bw - sfrPool->bwAlloced) +
24391 ((sfrPool->adjCCPool->bw - sfrPool->adjCCPool->bwAlloced)))))
24393 addtnlPRBs = allocInfo->rbsReq - (sfrPool->bw - sfrPool->bwAlloced);
24395 /* Adjusting CE Pool Info */
24396 sfrPool->bw += addtnlPRBs;
24397 sfrPool->type0End = ((sfrPool->poolendRB + addtnlPRBs + 1) /
24398 cell->rbgSize) - 1;
24400 /* Adjusting CC Pool Info */
24401 sfrPool->adjCCPool->type2Start += addtnlPRBs;
24402 sfrPool->adjCCPool->type2End = RGSCH_CEIL(sfrPool->adjCCPool->type2Start,
24404 sfrPool->adjCCPool->bw -= addtnlPRBs;
24405 *sfrpoolInfo = sfrPool;
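/* Illustrative numbers (assumed): a retx needs rbsReq = 8 but the CE pool has
 * only 5 free RBs. If the adjacent CC pool begins right after the CE pool and
 * has at least 3 free RBs, addtnlPRBs = 3 are moved across: the CE pool's bw
 * and type0End grow accordingly, while the CC pool's type2Start advances by 3
 * and its bw shrinks by 3, keeping the retx allocation contiguous. */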
24412 /* Check if CC pool is one of the following:
24413 * 1. |CE| + |CC "CCPool2Exists" = TRUE|
24414 * 2. |CC "CCPool2Exists" = FALSE| + |CE| + |CC "CCPool2Exists" = TRUE|
24416 if(TRUE == sfrPool->CCPool2Exists)
24418 l1 = &dlSf->sfrTotalPoolInfo.cePool;
24419 n1 = cmLListFirst(l1);
24420 sfrCEPool = (RgSchSFRPoolInfo*)(n1->node);
24421 if(allocInfo->rbsReq <= (sfrCEPool->bw - sfrCEPool->bwAlloced))
24423 *sfrpoolInfo = sfrCEPool;
24426 else if(allocInfo->rbsReq <= (sfrPool->bw - sfrPool->bwAlloced))
24428 *sfrpoolInfo = sfrPool;
24431 /* Check if CE and CC boundary has unallocated prbs */
24432 else if ((sfrPool->poolstartRB == sfrPool->type2Start) &&
24433 (sfrCEPool->type0End == ((sfrCEPool->poolendRB + 1) / cell->rbgSize) - 1))
24435 if(allocInfo->rbsReq <= (sfrCEPool->bw - sfrCEPool->bwAlloced) +
24436 (sfrPool->bw - sfrPool->bwAlloced))
24438 /* Checking if BW can be allocated partly from CE pool and partly
24441 addtnlPRBs = allocInfo->rbsReq - (sfrPool->bw - sfrPool->bwAlloced);
24442 /* Updating CE and CC type2 parameters based on the RBs allocated
24443 * from these pools*/
24444 sfrPool->type2Start -= addtnlPRBs;
24445 sfrPool->type2End = RGSCH_CEIL(sfrPool->type2Start, cell->rbgSize);
24446 sfrPool->bw += addtnlPRBs;
24447 if (addtnlPRBs == (sfrCEPool->bw - sfrCEPool->bwAlloced))
24449 sfrCEPool->bwAlloced = sfrCEPool->bw;
24450 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24454 sfrCEPool->bw -= addtnlPRBs;
24455 sfrCEPool->type0End = ((sfrCEPool->poolendRB + 1 - addtnlPRBs) / cell->rbgSize) - 1;
24457 *sfrpoolInfo = sfrPool;
24460 else if ( bwAvlbl <
24461 ((sfrCEPool->bw - sfrCEPool->bwAlloced) +
24462 (sfrPool->bw - sfrPool->bwAlloced)))
24464 /* All the Prbs from CE BW shall be allocated */
24465 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24467 sfrPool->type2Start = sfrCEPool->type2Start;
24468 sfrPool->bw += sfrCEPool->bw - sfrCEPool->bwAlloced;
24469 sfrCEPool->type2Start = sfrCEPool->poolendRB + 1;
24470 sfrCEPool->bwAlloced = sfrCEPool->bw;
24471 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24473 /* set the remaining RBs for the requested UE */
24474 allocInfo->rbsReq = (sfrPool->bw - sfrPool->bwAlloced);
24475 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24476 noLyrs = allocInfo->tbInfo[0].noLyr;
24477 allocInfo->tbInfo[0].bytesReq =
24478 rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24479 *sfrpoolInfo = sfrPool;
24490 /* Checking if no. of RBs required can be allocated from
24492 * 1. If available return the SFR pool.
24493 * 2. Else update the RBs required parameter based on the
24494 * BW available in the pool
24495 * 3. Return FALSE if no B/W is available.
24497 if (allocInfo->rbsReq <= (sfrPool->bw - sfrPool->bwAlloced))
24499 *sfrpoolInfo = sfrPool;
24504 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24506 if (bwAvlbl < sfrPool->bw - sfrPool->bwAlloced)
24510 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24512 bwAvlbl = sfrPool->bw - sfrPool->bwAlloced;
24513 poolWithMaxAvlblBw = sfrPool;
24515 n = cmLListNext(l);
24517 if ((isUeCellEdge == FALSE) && (n == NULLP))
24519 if(l != &dlSf->sfrTotalPoolInfo.cePool)
24521 l = &dlSf->sfrTotalPoolInfo.cePool;
24522 n = cmLListFirst(l);
24532 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24536 dlSf->sfrTotalPoolInfo.ccBwFull = TRUE;
24542 /* set the remaining RBs for the requested UE */
24543 allocInfo->rbsReq = poolWithMaxAvlblBw->bw -
24544 poolWithMaxAvlblBw->bwAlloced;
24545 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24546 noLyrs = allocInfo->tbInfo[0].noLyr;
24547 allocInfo->tbInfo[0].bytesReq =
24548 rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24549 *sfrpoolInfo = poolWithMaxAvlblBw;
24556 n = cmLListNext(l);
24558 if ((isUeCellEdge == FALSE) && (n == NULLP))
24560 if(l != &dlSf->sfrTotalPoolInfo.cePool)
24562 l = &dlSf->sfrTotalPoolInfo.cePool;
24563 n = cmLListFirst(l);
24579 #endif /* end of ifndef LTE_TDD*/
24580 /* LTE_ADV_FLAG_REMOVED_END */
24583 * @brief To check if DL BW available for non-DLFS allocation.
24587 * Function : rgSCHCmnNonDlfsBwAvlbl
24589 * Processing Steps:
24590 * - Determine availability based on RA Type.
24592 * @param[in] RgSchCellCb *cell
24593 * @param[in] RgSchDlSf *dlSf
24594 * @param[in] RgSchDlRbAlloc *allocInfo
24602 PRIVATE Bool rgSCHCmnNonDlfsBwAvlbl
24606 RgSchDlRbAlloc *allocInfo
24609 PRIVATE Bool rgSCHCmnNonDlfsBwAvlbl(cell, dlSf, allocInfo)
24612 RgSchDlRbAlloc *allocInfo;
24617 U8 ignoredDfctRbg = FALSE;
24619 TRC2(rgSCHCmnNonDlfsBwAvlbl);
24620 if (dlSf->bw <= dlSf->bwAlloced)
24622 RLOG_ARG3(L_DEBUG,DBG_CELLID,cell->cellId, "(%d:%d)FAILED CRNTI:%d",
24623 dlSf->bw, dlSf->bwAlloced,allocInfo->rnti);
24626 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
24628 /* Fix for ccpu00123919 : Number of RBs in case of RETX should be same as
24629 * that of initial transmission. */
24630 if(allocInfo->tbInfo[0].tbCb->txCntr)
24632 /* RB assignment is being done for RETX. If reqRbs is
24633 * a multiple of rbgSize then ignore lstRbgDfct. If reqRbs is
24634 * not a multiple of rbgSize then check if lstRbgDfct exists */
24635 if (allocInfo->rbsReq % cell->rbgSize == 0)
24637 if (dlSf->lstRbgDfct)
24639 /* In this scenario we are wasting the last RBG for this dlSf */
24642 dlSf->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24643 /* Fix: MUE_PERTTI_DL */
24644 dlSf->lstRbgDfct = 0;
24645 ignoredDfctRbg = TRUE;
24651 if (dlSf->lstRbgDfct)
24653 /* Check if type0 allocation can cater to this RETX requirement */
24654 if ((allocInfo->rbsReq % cell->rbgSize) != (cell->rbgSize - dlSf->lstRbgDfct))
24661 /* cannot allocate same number of required RBs */
24667 /* Condition is modified appropriately to find
24668 * if rbsReq is less than the available RBs*/
24669 if(allocInfo->rbsReq <= (((dlSf->type0End - dlSf->type2End + 1)*\
24670 cell->rbgSize) - dlSf->lstRbgDfct))
24674 /* ccpu00132358:MOD- Removing "ifndef LTE_TDD" for unblocking the RB
24675 * allocation in TDD when requested RBs are more than available RBs*/
24678 /* MS_WORKAROUND for ccpu00122022 */
24679 if (dlSf->bw < dlSf->bwAlloced + cell->rbgSize)
24681 /* ccpu00132358- Re-assigning the values which were updated above
24682 * if it is RETX and Last RBG available*/
24683 if(ignoredDfctRbg == TRUE)
24686 dlSf->bwAlloced -= (cell->rbgSize - dlSf->lstRbgDfct);
24687 dlSf->lstRbgDfct = 1;
24693 /* Fix: Number of RBs in case of RETX should be same as
24694 * that of initial transmission. */
24695 if(allocInfo->tbInfo[0].tbCb->txCntr == 0
24697 && (FALSE == rgSCHLaaIsLaaTB(allocInfo))
24701 /* Setting the remaining RBs for the requested UE*/
24702 allocInfo->rbsReq = (((dlSf->type0End - dlSf->type2End + 1)*\
24703 cell->rbgSize) - dlSf->lstRbgDfct);
24704 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24705 noLyrs = allocInfo->tbInfo[0].noLyr;
24706 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24707 /* DwPts Scheduling Changes Start */
24709 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
24711 allocInfo->tbInfo[0].bytesReq =
24712 rgTbSzTbl[noLyrs-1][tbs][RGSCH_MAX(allocInfo->rbsReq*3/4,1) - 1]/8;
24715 /* DwPts Scheduling Changes End */
24719 /* ccpu00132358- Re-assigning the values which were updated above
24720 * if it is RETX and Last RBG available*/
24721 if(ignoredDfctRbg == TRUE)
24724 dlSf->bwAlloced -= (cell->rbgSize - dlSf->lstRbgDfct);
24725 dlSf->lstRbgDfct = 1;
24728 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "FAILED for CRNTI:%d",
24730 printf ("RB Alloc failed for LAA TB type 0\n");
24736 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
24738 if (allocInfo->rbsReq <= (dlSf->bw - dlSf->bwAlloced))
24742 /* ccpu00132358:MOD- Removing "ifndef LTE_TDD" for unblocking the RB
24743 * allocation in TDD when requested RBs are more than available RBs*/
24746 /* Fix: Number of RBs in case of RETX should be same as
24747 * that of initial transmission. */
24748 if((allocInfo->tbInfo[0].tbCb->txCntr == 0)
24750 && (FALSE == rgSCHLaaIsLaaTB(allocInfo))
24754 /* set the remaining RBs for the requested UE */
24755 allocInfo->rbsReq = dlSf->bw - dlSf->bwAlloced;
24756 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24757 noLyrs = allocInfo->tbInfo[0].noLyr;
24758 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24759 /* DwPts Scheduling Changes Start */
24761 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
24763 allocInfo->tbInfo[0].bytesReq =
24764 rgTbSzTbl[noLyrs-1][tbs][RGSCH_MAX(allocInfo->rbsReq*3/4,1) - 1]/8;
24767 /* DwPts Scheduling Changes End */
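/* The 3/4 factor above reflects the shortened DwPTS region of a special
 * subframe. As a hedged example with assumed numbers: rbsReq = 20 is looked up
 * as RGSCH_MAX(20*3/4, 1) = 15 RB-equivalents, so the reported bytesReq stays
 * within what the reduced symbol budget can actually carry. */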
24771 printf ("RB Alloc failed for LAA TB type 2\n");
24772 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"FAILED for CRNTI:%d",allocInfo->rnti);
24775 /* Fix: Number of RBs in case of RETX should be same as
24776 * that of initial transmission. */
24780 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"FAILED for CRNTI:%d",allocInfo->rnti);
24784 /* LTE_ADV_FLAG_REMOVED_START */
24787 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24791 * Function : rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc
24793 * Processing Steps:
24795 * @param[in] RgSchCellCb *cell
24796 * @param[in] RgSchDlSf *dlSf
24797 * @param[in] U8 rbStrt
24798 * @param[in] U8 numRb
24803 PUBLIC Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc
24811 PUBLIC Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, rbStrt, numRb)
24820 RgSchSFRPoolInfo *sfrPool;
24821 TRC2(rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc);
24823 l = &dlSf->sfrTotalPoolInfo.ccPool;
24825 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
24826 dlSf->bwAlloced += numRb;
24827 dlSf->type2Start += numRb;
24828 n = cmLListFirst(l);
24832 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24833 n = cmLListNext(l);
24835 /* If the pool contains some RBs allocated in this allocation, e.g. the pool is [30..50], Pool->type2Start is 40 and dlSf->type2Start is 45, then update the variables in the pool */
24836 if((sfrPool->poolendRB >= dlSf->type2Start) && (sfrPool->type2Start < dlSf->type2Start))
24838 sfrPool->type2End = dlSf->type2End;
24839 sfrPool->bwAlloced = dlSf->type2Start - sfrPool->poolstartRB;
24840 sfrPool->type2Start = dlSf->type2Start;
24844 /* If the pool contains all RBs allocated in this allocation*/
24845 if(dlSf->type2Start > sfrPool->poolendRB)
24847 sfrPool->type2End = sfrPool->type0End + 1;
24848 sfrPool->bwAlloced = sfrPool->bw;
24849 sfrPool->type2Start = sfrPool->poolendRB + 1;
24854 if (l != &dlSf->sfrTotalPoolInfo.cePool)
24856 l = &dlSf->sfrTotalPoolInfo.cePool;
24857 n = cmLListFirst(l);
24867 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24871 * Function : rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
24873 * Processing Steps:
24875 * @param[in] RgSchCellCb *cell
24876 * @param[in] RgSchDlSf *dlSf
24877 * @param[in] U8 rbStrt
24878 * @param[in] U8 numRb
24884 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
24893 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc(cell, ue, dlSf, rbStrt, numRb)
24903 RgSchSFRPoolInfo *sfrCCPool1 = NULL;
24904 RgSchSFRPoolInfo *sfrCCPool2 = NULL;
24907 TRC2(rgSCHCmnNonDlfsUpdDSFRTyp2Alloc);
24908 /* Move the type2End pivot forward */
24911 l = &dlSf->sfrTotalPoolInfo.ccPool;
24912 n = cmLListFirst(l);
24915 sfrCCPool1 = (RgSchSFRPoolInfo*)(n->node);
24917 if (sfrCCPool1 == NULLP)
24919 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24920 "sfrCCPool1 is NULL for CRNTI:%d",ue->ueId);
24923 n = cmLListNext(l);
24926 sfrCCPool2 = (RgSchSFRPoolInfo*)(n->node);
24927 n = cmLListNext(l);
24929 if((sfrCCPool1) && (sfrCCPool2))
24931 /* Based on RNTP info, the CC user is assigned high power on a per-subframe basis */
24932 if(((dlSf->type2Start >= sfrCCPool1->pwrHiCCRange.startRb) &&
24933 (dlSf->type2Start + numRb < sfrCCPool1->pwrHiCCRange.endRb)) ||
24934 ((dlSf->type2Start >= sfrCCPool2->pwrHiCCRange.startRb) &&
24935 (dlSf->type2Start + numRb < sfrCCPool2->pwrHiCCRange.endRb)))
24937 ue->lteAdvUeCb.isCCUePHigh = TRUE;
24939 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
24940 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, dlSf->type2Start, numRb, dlSf->bw);
24943 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24944 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
24951 if((dlSf->type2Start >= sfrCCPool1->pwrHiCCRange.startRb) &&
24952 (dlSf->type2Start + numRb < sfrCCPool1->pwrHiCCRange.endRb))
24954 ue->lteAdvUeCb.isCCUePHigh = TRUE;
24956 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
24957 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, dlSf->type2Start, numRb, dlSf->bw);
24960 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24961 "rgSCHCmnBuildRntpInfo() function returned RFAILED CRNTI:%d",ue->ueId);
24967 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
24969 dlSf->bwAlloced += numRb;
24970 /*MS_FIX for ccpu00123918*/
24971 dlSf->type2Start += numRb;
24977 #endif /* end of ifndef LTE_TDD*/
24978 /* LTE_ADV_FLAG_REMOVED_END */
24980 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24984 * Function : rgSCHCmnNonDlfsUpdTyp2Alloc
24986 * Processing Steps:
24988 * @param[in] RgSchCellCb *cell
24989 * @param[in] RgSchDlSf *dlSf
24990 * @param[in] U8 rbStrt
24991 * @param[in] U8 numRb
24996 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc
25004 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, rbStrt, numRb)
25011 TRC2(rgSCHCmnNonDlfsUpdTyp2Alloc);
25012 /* Move the type2End pivot forward */
25013 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25014 //#ifndef LTEMAC_SPS
25015 dlSf->bwAlloced += numRb;
25016 /*Fix for ccpu00123918*/
25017 dlSf->type2Start += numRb;
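/* Small worked example (rbgSize = 4 assumed): an allocation of numRb = 6
 * starting at rbStrt = 10 sets type2End to RGSCH_CEIL(16, 4) = 4, i.e. the
 * first RBG at or beyond the end of this allocation, advances type2Start by 6
 * and adds 6 RBs to bwAlloced. */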
25023 * @brief To do DL allocation using TYPE0 RA.
25027 * Function : rgSCHCmnNonDlfsType0Alloc
25029 * Processing Steps:
25030 * - Perform TYPE0 allocation using the RBGs between
25031 * type0End and type2End.
25032 * - Build the allocation mask as per RBG positioning.
25033 * - Update the allocation parameters.
25035 * @param[in] RgSchCellCb *cell
25036 * @param[in] RgSchDlSf *dlSf
25037 * @param[in] RgSchDlRbAlloc *allocInfo
25043 PRIVATE Void rgSCHCmnNonDlfsType0Alloc
25047 RgSchDlRbAlloc *allocInfo,
25051 PRIVATE Void rgSCHCmnNonDlfsType0Alloc(cell, dlSf, allocInfo, dlUe)
25054 RgSchDlRbAlloc *allocInfo;
25058 U32 dlAllocMsk = 0;
25059 U8 rbgFiller = dlSf->lstRbgDfct;
25060 U8 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + rbgFiller), cell->rbgSize);
25061 //U8 noRbgs = (allocInfo->rbsReq + rbgFiller)/ cell->rbgSize;
25065 U32 tb1BytesAlloc = 0;
25066 U32 tb2BytesAlloc = 0;
25067 RgSchCmnDlUe *dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25069 TRC2(rgSCHCmnNonDlfsType0Alloc);
25070 //if(noRbgs == 0) noRbgs = 1; /* Not required as ceiling is used above*/
25072 /* Fix for ccpu00123919*/
25073 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25074 if (dlSf->bwAlloced + noRbs > dlSf->bw)
25080 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25083 /* Fix for ccpu00138701: Ceiling is used to derive the num of RBGs. Therefore,
25084 * after this operation, check that Max TB size and Max RBs are not crossed;
25085 * if crossed then decrement the num of RBGs. */
25086 //if((noRbs + rbgFiller) % cell->rbgSize)
25087 if((noRbs > allocInfo->rbsReq) &&
25088 (allocInfo->rbsReq + rbgFiller) % cell->rbgSize)
25089 {/* considering ue category limitation
25090 * due to ceiling */
25093 if (rgSCHLaaIsLaaTB(allocInfo)== FALSE)
25096 if ((allocInfo->tbInfo[0].schdlngForTb) && (!allocInfo->tbInfo[0].tbCb->txCntr))
25098 iTbs = allocInfo->tbInfo[0].iTbs;
25099 noLyr = allocInfo->tbInfo[0].noLyr;
25100 tb1BytesAlloc = rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25103 if ((allocInfo->tbInfo[1].schdlngForTb) && (!allocInfo->tbInfo[1].tbCb->txCntr))
25105 iTbs = allocInfo->tbInfo[1].iTbs;
25106 noLyr = allocInfo->tbInfo[1].noLyr;
25107 tb2BytesAlloc = rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25111 /* Only Check for New Tx No need for Retx */
25112 if (tb1BytesAlloc || tb2BytesAlloc)
25114 if (( ue->dl.aggTbBits >= dlUe->maxTbBits) ||
25115 (tb1BytesAlloc >= dlUe->maxTbSz/8) ||
25116 (tb2BytesAlloc >= dlUe->maxTbSz/8) ||
25117 (noRbs >= dlUe->maxRb))
25123 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25127 /* type0End would have been set initially (during subframe init) at the bit position
25128 * (cell->noOfRbgs - 1), 0 being the most significant.
25129 * Derive dlAllocMsk for noRbgs at the appropriate position */
25130 dlAllocMsk |= (((1 << noRbgs) - 1) << (31 - dlSf->type0End));
25131 /* Move backwards the type0End pivot */
25132 dlSf->type0End -= noRbgs;
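/* Worked example of the mask math above (values assumed): with noRbgs = 3 and
 * type0End = 24, ((1 << 3) - 1) = 0x7 is shifted left by 31 - 24 = 7, setting
 * bits 7..9 of the 32-bit mask, i.e. RBGs 22..24 counted with bit 31 as RBG 0.
 * type0End then moves back to 21 for the next type-0 allocation. */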
25133 /*Fix for ccpu00123919*/
25134 /*noRbs = (noRbgs * cell->rbgSize) - rbgFiller;*/
25135 /* Update the bwAlloced field accordingly */
25136 //#ifndef LTEMAC_SPS /* ccpu00129474*/
25137 dlSf->bwAlloced += noRbs;
25139 /* Update Type0 Alloc Info */
25140 allocInfo->allocInfo.raType0.numDlAlloc = noRbgs;
25141 allocInfo->allocInfo.raType0.dlAllocBitMask |= dlAllocMsk;
25142 allocInfo->rbsAlloc = noRbs;
25144 /* Update Tb info for each scheduled TB */
25145 iTbs = allocInfo->tbInfo[0].iTbs;
25146 noLyr = allocInfo->tbInfo[0].noLyr;
25147 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant.
25148 * RETX TB Size is same as Init TX TB Size */
25149 if (allocInfo->tbInfo[0].tbCb->txCntr)
25151 allocInfo->tbInfo[0].bytesAlloc =
25152 allocInfo->tbInfo[0].bytesReq;
25156 allocInfo->tbInfo[0].bytesAlloc =
25157 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25158 /* DwPts Scheduling Changes Start */
25160 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
25162 allocInfo->tbInfo[0].bytesAlloc =
25163 rgTbSzTbl[noLyr - 1][iTbs][RGSCH_MAX(noRbs*3/4,1) - 1]/8;
25166 /* DwPts Scheduling Changes End */
25169 if (allocInfo->tbInfo[1].schdlngForTb)
25171 iTbs = allocInfo->tbInfo[1].iTbs;
25172 noLyr = allocInfo->tbInfo[1].noLyr;
25173 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant
25174 * RETX TB Size is same as Init TX TB Size */
25175 if (allocInfo->tbInfo[1].tbCb->txCntr)
25177 allocInfo->tbInfo[1].bytesAlloc =
25178 allocInfo->tbInfo[1].bytesReq;
25182 allocInfo->tbInfo[1].bytesAlloc =
25183 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25184 /* DwPts Scheduling Changes Start */
25186 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
25188 allocInfo->tbInfo[1].bytesAlloc =
25189 rgTbSzTbl[noLyr - 1][iTbs][RGSCH_MAX(noRbs*3/4,1) - 1]/8;
25192 /* DwPts Scheduling Changes End */
25196 /* The last RBG, which can be smaller than the RBG size, is considered
25197 * only for the first-time allocation of a TYPE0 UE */
25198 dlSf->lstRbgDfct = 0;
25205 * @brief To prepare RNTP value from the PRB allocation (P-High -> 1 and P-Low -> 0)
25209 * Function : rgSCHCmnBuildRntpInfo
25211 * Processing Steps:
25213 * @param[in] U8 *rntpPtr
25214 * @param[in] U8 startRb
25215 * @param[in] U8 numRb
25221 PRIVATE S16 rgSCHCmnBuildRntpInfo
25230 PRIVATE S16 rgSCHCmnBuildRntpInfo(cell, rntpPtr, startRb, nmbRb, bw)
25238 U16 rbPtrStartIdx; /* Start Index of Octet Buffer to be filled */
25239 U16 rbPtrEndIdx; /* End Index of Octet Buffer to be filled */
25240 U16 rbBitLoc; /* Bit Location to be set as 1 in the current Byte */
25241 U16 nmbRbPerByte; /* PRBs to be set in the current Byte (in case of multiple Bytes) */
25243 TRC2(rgSCHCmnBuildRntpInfo);
25245 rbPtrStartIdx = (startRb)/8;
25246 rbPtrEndIdx = (startRb + nmbRb)/8;
25248 if (rntpPtr == NULLP)
25250 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
25251 "rgSCHCmnBuildRntpInfo():"
25252 "rntpPtr can't be NULLP (Memory Allocation Failed)");
25256 while(rbPtrStartIdx <= rbPtrEndIdx)
25258 rbBitLoc = (startRb)%8;
25260 /* case 1: startRb and endRb lies in same Byte */
25261 if (rbPtrStartIdx == rbPtrEndIdx)
25263 rntpPtr[rbPtrStartIdx] = rntpPtr[rbPtrStartIdx]
25264 | (((1<<nmbRb)-1)<<rbBitLoc);
25267 /* case 2: startRb and endRb lies in different Byte */
25268 if (rbPtrStartIdx != rbPtrEndIdx)
25270 nmbRbPerByte = 8 - rbBitLoc;
25271 nmbRb = nmbRb - nmbRbPerByte;
25272 rntpPtr[rbPtrStartIdx] = rntpPtr[rbPtrStartIdx]
25273 | (((1<<nmbRbPerByte)-1)<<rbBitLoc);
25274 startRb = startRb + nmbRbPerByte;
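/* Hedged walk-through (inputs assumed): startRb = 6, nmbRb = 5 gives start
 * byte 0 and end byte 1. The first pass lands in case 2 and sets bits 6..7 of
 * byte 0 (nmbRbPerByte = 2), leaving nmbRb = 3 and startRb = 8; the next pass
 * lands in case 1 and sets bits 0..2 of byte 1, so PRBs 6..10 end up marked
 * P-High in the RNTP bitmap. */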
25280 /* dsfr_pal_fixes ** 21-March-2013 ** SKS ** Adding Debug logs */
25282 /* dsfr_pal_fixes ** 25-March-2013 ** SKS ** Adding Debug logs to print RNTP */
25288 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
25292 * Function : rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc
25294 * Processing Steps:
25296 * @param[in] RgSchCellCb *cell
25297 * @param[in] RgSchDlSf *dlSf
25298 * @param[in] U8 rbStrt
25299 * @param[in] U8 numRb
25304 PRIVATE S16 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc
25309 RgSchSFRPoolInfo *sfrPool,
25314 PRIVATE S16 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc(cell, ue, dlSf, sfrPool, rbStrt, numRb)
25318 RgSchSFRPoolInfo *sfrPool;
25327 TRC2(rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc);
25328 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25329 sfrPool->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25332 dlSf->type2Start += numRb;
25333 dlSf->bwAlloced += numRb;
25335 if(cell->lteAdvCb.dsfrCfg.status == RGR_ENABLE)
25337 /* Based on RNTP info, the CC user is assigned high power on a per-subframe basis */
25338 if(FALSE == ue->lteAdvUeCb.rgrLteAdvUeCfg.isUeCellEdge)
25340 if((sfrPool->type2Start >= sfrPool->pwrHiCCRange.startRb) &&
25341 (sfrPool->type2Start + numRb < sfrPool->pwrHiCCRange.endRb))
25343 ue->lteAdvUeCb.isCCUePHigh = TRUE;
25345 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
25346 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, sfrPool->type2Start, numRb, dlSf->bw);
25349 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc():"
25350 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
25357 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
25358 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, sfrPool->type2Start, numRb, dlSf->bw);
25361 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc():"
25362 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
25367 sfrPool->type2Start += numRb;
25368 sfrPool->bwAlloced += numRb;
25375 * @brief To do DL allocation using TYPE0 RA.
25379 * Function : rgSCHCmnNonDlfsSFRPoolType0Alloc
25381 * Processing Steps:
25382 * - Perform TYPE0 allocation using the RBGs between type0End and type2End.
25383 * - Build the allocation mask as per RBG positioning.
25384 * - Update the allocation parameters.
25386 * @param[in] RgSchCellCb *cell
25387 * @param[in] RgSchDlSf *dlSf
25388 * @param[in] RgSchDlRbAlloc *allocInfo
25393 PRIVATE Void rgSCHCmnNonDlfsSFRPoolType0Alloc
25397 RgSchSFRPoolInfo *poolInfo,
25398 RgSchDlRbAlloc *allocInfo
25401 PRIVATE Void rgSCHCmnNonDlfsSFRPoolType0Alloc(cell, dlSf, poolInfo, allocInfo)
25404 RgSchSFRPoolInfo *poolInfo;
25405 RgSchDlRbAlloc *allocInfo;
25408 U32 dlAllocMsk = 0;
25415 TRC2(rgSCHCmnNonDlfsSFRPoolType0Alloc);
25417 if (poolInfo->poolstartRB + poolInfo->bw == dlSf->bw)
25419 if (poolInfo->type0End == dlSf->bw/4)
25421 rbgFiller = dlSf->lstRbgDfct;
25422 /* The last RBG, which can be smaller than the RBG size, is considered
25423 * only for the first-time allocation of a TYPE0 UE */
25424 dlSf->lstRbgDfct = 0;
25428 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + rbgFiller), cell->rbgSize);
25430 /* Abhinav to-do start */
25431 /* MS_FIX for ccpu00123919*/
25432 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25433 if (dlSf->bwAlloced + noRbs > dlSf->bw)
25439 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25441 /* Abhinav to-do end */
25445 /* type0End would have been set initially (during subframe init) at the bit position
25446 * (cell->noOfRbgs - 1), 0 being the most significant.
25447 * Derive dlAllocMsk for noRbgs at the appropriate position */
25448 dlAllocMsk |= (((1 << noRbgs) - 1) << (31 - poolInfo->type0End));
25449 /* Move backwards the type0End pivot */
25450 poolInfo->type0End -= noRbgs;
25451 /*MS_FIX for ccpu00123919*/
25452 /*noRbs = (noRbgs * cell->rbgSize) - rbgFiller;*/
25453 /* Update the bwAlloced field accordingly */
25454 poolInfo->bwAlloced += noRbs + dlSf->lstRbgDfct;
25455 dlSf->bwAlloced += noRbs + dlSf->lstRbgDfct;
25457 /* Update Type0 Alloc Info */
25458 allocInfo->allocInfo.raType0.numDlAlloc = noRbgs;
25459 allocInfo->allocInfo.raType0.dlAllocBitMask |= dlAllocMsk;
25460 allocInfo->rbsAlloc = noRbs;
25462 /* Update Tb info for each scheduled TB */
25463 iTbs = allocInfo->tbInfo[0].iTbs;
25464 noLyr = allocInfo->tbInfo[0].noLyr;
25465 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant.
25466 * RETX TB Size is same as Init TX TB Size */
25467 if (allocInfo->tbInfo[0].tbCb->txCntr)
25469 allocInfo->tbInfo[0].bytesAlloc =
25470 allocInfo->tbInfo[0].bytesReq;
25474 allocInfo->tbInfo[0].bytesAlloc =
25475 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25478 if (allocInfo->tbInfo[1].schdlngForTb)
25480 iTbs = allocInfo->tbInfo[1].iTbs;
25481 noLyr = allocInfo->tbInfo[1].noLyr;
25482 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant
25483 * RETX TB Size is same as Init TX TB Size */
25484 if (allocInfo->tbInfo[1].tbCb->txCntr)
25486 allocInfo->tbInfo[1].bytesAlloc =
25487 allocInfo->tbInfo[1].bytesReq;
25491 allocInfo->tbInfo[1].bytesAlloc =
25492 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25496 /* The last RBG, which can be smaller than the RBG size, is considered
25497 * only for the first-time allocation of a TYPE0 UE */
25498 dlSf->lstRbgDfct = 0;
25503 * @brief Computes RNTP Info for a subframe.
25507 * Function : rgSCHCmnNonDlfsDsfrRntpComp
25509 * Processing Steps:
25510 * - Computes RNTP info from individual pools.
25512 * @param[in] RgSchDlSf *dlSf
25518 PRIVATE void rgSCHCmnNonDlfsDsfrRntpComp
25524 PRIVATE void rgSCHCmnNonDlfsDsfrRntpComp(cell, dlSf)
25529 PRIVATE U16 samples = 0;
25531 U16 bwBytes = (dlSf->bw-1)/8;
25532 RgrLoadInfIndInfo *rgrLoadInf;
25536 TRC2(rgSCHCmnNonDlfsDsfrRntpComp);
25538 len = (dlSf->bw % 8 == 0) ? dlSf->bw/8 : dlSf->bw/8 + 1;
25540 /* RNTP info is ORed every TTI and the sample is stored in cell control block */
25541 for(i = 0; i <= bwBytes; i++)
25543 cell->rntpAggrInfo.val[i] |= dlSf->rntpInfo.val[i];
25545 samples = samples + 1;
25546 /* After every 1000 ms, the RNTP info is sent to the application to be forwarded to all neighbouring eNBs,
25547 informing them about the load indication for cell-edge users */
25548 if(RG_SCH_MAX_RNTP_SAMPLES == samples)
25551 ret = rgSCHUtlAllocSBuf (cell->instIdx,(Data**)&rgrLoadInf,
25552 sizeof(RgrLoadInfIndInfo));
25555 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "Could not "
25556 "allocate memory for sending LoadInfo");
25560 rgrLoadInf->u.rntpInfo.pres = cell->rntpAggrInfo.pres;
25561 /* dsfr_pal_fixes ** 21-March-2013 ** SKS */
25562 rgrLoadInf->u.rntpInfo.len = len;
25564 /* dsfr_pal_fixes ** 21-March-2013 ** SKS */
25565 rgrLoadInf->u.rntpInfo.val = cell->rntpAggrInfo.val;
25566 rgrLoadInf->cellId = cell->cellId;
25568 /* dsfr_pal_fixes ** 22-March-2013 ** SKS */
25569 rgrLoadInf->bw = dlSf->bw;
25570 rgrLoadInf->type = RGR_SFR;
25572 ret = rgSCHUtlRgrLoadInfInd(cell, rgrLoadInf);
25575 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsDsfrRntpComp():"
25576 "rgSCHUtlRgrLoadInfInd() returned RFAILED");
25579 cmMemset(cell->rntpAggrInfo.val,0,len);
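/* Net effect of this routine: the per-TTI RNTP bitmap is ORed into
 * cell->rntpAggrInfo every subframe, and once RG_SCH_MAX_RNTP_SAMPLES samples
 * have accumulated the aggregate is shipped to the application through
 * rgSCHUtlRgrLoadInfInd and cleared for the next reporting window. */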
25583 /* LTE_ADV_FLAG_REMOVED_END */
25585 /* LTE_ADV_FLAG_REMOVED_START */
25587 * @brief Performs RB allocation per UE from a pool.
25591 * Function : rgSCHCmnSFRNonDlfsUeRbAlloc
25593 * Processing Steps:
25594 * - Allocate consecutively available RBs.
25596 * @param[in] RgSchCellCb *cell
25597 * @param[in] RgSchUeCb *ue
25598 * @param[in] RgSchDlSf *dlSf
25599 * @param[out] U8 *isDlBwAvail
25607 PRIVATE S16 rgSCHCmnSFRNonDlfsUeRbAlloc
25615 PRIVATE S16 rgSCHCmnSFRNonDlfsUeRbAlloc(cell, ue, dlSf, isDlBwAvail)
25622 RgSchDlRbAlloc *allocInfo;
25623 RgSchCmnDlUe *dlUe;
25625 RgSchSFRPoolInfo *sfrpoolInfo = NULLP;
25627 TRC2(rgSCHCmnSFRNonDlfsUeRbAlloc);
25629 isUECellEdge = RG_SCH_CMN_IS_UE_CELL_EDGE(ue);
25631 dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25632 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
25633 *isDlBwAvail = TRUE;
25635 /*Find which pool is available for this UE*/
25636 if (rgSCHCmnNonDlfsSFRBwAvlbl(cell, &sfrpoolInfo, dlSf, allocInfo, isUECellEdge) != TRUE)
25638 /* SFR_FIX - If this is a CE UE, there may still be BW available in the CC pool,
25639 so CC UEs can still be scheduled */
25642 *isDlBwAvail = TRUE;
25646 *isDlBwAvail = FALSE;
25651 if (dlUe->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX || dlUe->proc->tbInfo[1].isAckNackDtx)
25653 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat, TRUE);
25657 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat,FALSE);
25660 if (!(allocInfo->pdcch))
25662 /* Returning ROK since PDCCH might be available for another UE and further allocations could be done */
25667 allocInfo->rnti = ue->ueId;
25670 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
25672 allocInfo->allocInfo.raType2.isLocal = TRUE;
25673 /* rg004.201 patch - ccpu00109921 fix end */
25674 /* MS_FIX for ccpu00123918*/
25675 allocInfo->allocInfo.raType2.rbStart = (U8)sfrpoolInfo->type2Start;
25676 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
25677 /* rg007.201 - Changes for MIMO feature addition */
25678 /* rg008.201 - Removed dependency on MIMO compile-time flag */
25679 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc(cell, ue, dlSf, sfrpoolInfo, \
25680 allocInfo->allocInfo.raType2.rbStart, \
25681 allocInfo->allocInfo.raType2.numRb);
25682 allocInfo->rbsAlloc = allocInfo->rbsReq;
25683 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
25685 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
25687 rgSCHCmnNonDlfsSFRPoolType0Alloc(cell, dlSf, sfrpoolInfo, allocInfo);
25691 rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,0);
25692 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
25694 rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,1);
25699 #if defined(LTEMAC_SPS)
25700 /* Update the sub-frame with new allocation */
25701 dlSf->bwAlloced += allocInfo->rbsReq;
25707 /* LTE_ADV_FLAG_REMOVED_END */
25708 #endif /* LTE_TDD */
25711 * @brief Performs RB allocation per UE for frequency non-selective cell.
25715 * Function : rgSCHCmnNonDlfsUeRbAlloc
25717 * Processing Steps:
25718 * - Allocate consecutively available RBs.
25720 * @param[in] RgSchCellCb *cell
25721 * @param[in] RgSchUeCb *ue
25722 * @param[in] RgSchDlSf *dlSf
25723 * @param[out] U8 *isDlBwAvail
25730 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc
25738 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc(cell, ue, dlSf, isDlBwAvail)
25745 RgSchDlRbAlloc *allocInfo;
25746 RgSchCmnDlUe *dlUe;
25750 TRC2(rgSCHCmnNonDlfsUeRbAlloc);
25753 RgSch5gtfUeCb *ue5gtfCb = &(ue->ue5gtfCb);
25754 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[ue5gtfCb->BeamId]);
25756 dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25757 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
25758 *isDlBwAvail = TRUE;
25760 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
25762 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
25763 "5GTF_ERROR : vrbg allocated > 25 :ue (%u)",
25765 printf("5GTF_ERROR vrbg allocated > 25\n");
25769 if (dlUe->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX
25770 || dlUe->proc->tbInfo[1].isAckNackDtx)
25772 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat, TRUE);
25776 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat,FALSE);
25778 if (!(allocInfo->pdcch))
25780 /* Returning ROK since PDCCH might be available for another UE and
25781 * further allocations could be done */
25782 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
25783 "5GTF_ERROR : PDCCH allocation failed :ue (%u)",
25785 printf("5GTF_ERROR PDCCH allocation failed\n");
25789 //maxPrb = RGSCH_MIN((allocInfo->vrbgReq * MAX_5GTF_VRBG_SIZE), ue5gtfCb->maxPrb);
25790 //maxPrb = RGSCH_MIN(maxPrb,
25791 //((beamInfo->totVrbgAvail - beamInfo->vrbgStart)* MAX_5GTF_VRBG_SIZE)));
25792 //TODO_SID Need to check for vrbg available after scheduling for same beam.
25793 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart = beamInfo->vrbgStart;
25794 allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg = allocInfo->vrbgReq;
25795 //TODO_SID: Setting for max TP
25796 allocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange = 1;
25797 allocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
25798 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart, allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg);
25799 allocInfo->tbInfo[0].tbCb->dlGrnt.SCID = 0;
25800 allocInfo->tbInfo[0].tbCb->dlGrnt.dciFormat = allocInfo->dciFormat;
25801 //Filling temporarily
25802 allocInfo->tbInfo[0].tbCb->dlGrnt.rbStrt = (allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
25803 allocInfo->tbInfo[0].tbCb->dlGrnt.numRb = (allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
25805 beamInfo->vrbgStart += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
25806 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
25807 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
25815 * @brief Performs RB allocation for CCCH SDU lists of frequency non-selective cell.
25819 * Function : rgSCHCmnNonDlfsCcchSduAlloc
25821 * Processing Steps:
25822 * - For each element in the list, Call rgSCHCmnNonDlfsCcchSduRbAlloc().
25823 * - If allocation is successful, add the ueCb to the scheduled list of CCCH SDU.
25825 * - else, add UeCb to non-scheduled list.
25827 * @param[in] RgSchCellCb *cell
25828 * @param[in, out] RgSchCmnCcchSduRbAlloc *allocInfo
25829 * @param[in] U8 isRetx
25834 PRIVATE Void rgSCHCmnNonDlfsCcchSduAlloc
25837 RgSchCmnCcchSduRbAlloc *allocInfo,
25841 PRIVATE Void rgSCHCmnNonDlfsCcchSduAlloc(cell, allocInfo, isRetx)
25843 RgSchCmnCcchSduRbAlloc *allocInfo;
25848 CmLListCp *ccchSduLst = NULLP;
25849 CmLListCp *schdCcchSduLst = NULLP;
25850 CmLListCp *nonSchdCcchSduLst = NULLP;
25851 CmLList *schdLnkNode = NULLP;
25852 CmLList *toBeSchdLnk = NULLP;
25853 RgSchDlSf *dlSf = allocInfo->ccchSduDlSf;
25854 RgSchUeCb *ueCb = NULLP;
25855 RgSchDlHqProcCb *hqP = NULLP;
25856 TRC2(rgSCHCmnNonDlfsCcchSduAlloc);
25860 /* Initialize re-transmitting lists */
25861 ccchSduLst = &(allocInfo->ccchSduRetxLst);
25862 schdCcchSduLst = &(allocInfo->schdCcchSduRetxLst);
25863 nonSchdCcchSduLst = &(allocInfo->nonSchdCcchSduRetxLst);
25867 /* Initialize transmitting lists */
25868 ccchSduLst = &(allocInfo->ccchSduTxLst);
25869 schdCcchSduLst = &(allocInfo->schdCcchSduTxLst);
25870 nonSchdCcchSduLst = &(allocInfo->nonSchdCcchSduTxLst);
25873 /* Perform allocations for the list */
25874 toBeSchdLnk = cmLListFirst(ccchSduLst);
25875 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
25877 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
25878 ueCb = hqP->hqE->ue;
25879 schdLnkNode = &hqP->schdLstLnk;
25880 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
25881 ret = rgSCHCmnNonDlfsCcchSduRbAlloc(cell, ueCb, dlSf);
25884 /* Allocation failed: Add remaining MSG4 nodes to non-scheduled
25885 * list and return */
25888 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
25889 ueCb = hqP->hqE->ue;
25890 schdLnkNode = &hqP->schdLstLnk;
25891 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
25892 cmLListAdd2Tail(nonSchdCcchSduLst, schdLnkNode);
25893 toBeSchdLnk = toBeSchdLnk->next;
25894 } while(toBeSchdLnk);
25898 /* Allocation successful: Add UE to the scheduled list */
25899 cmLListAdd2Tail(schdCcchSduLst, schdLnkNode);
25907 * @brief Performs RB allocation for CcchSdu for frequency non-selective cell.
25911 * Function : rgSCHCmnNonDlfsCcchSduRbAlloc
25913 * Processing Steps:
25915 * - Allocate consecutively available RBs
25917 * @param[in] RgSchCellCb *cell
25918 * @param[in] RgSchUeCb *ueCb
25919 * @param[in] RgSchDlSf *dlSf
25925 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc
25932 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc(cell, ueCb, dlSf)
25938 RgSchDlRbAlloc *allocInfo;
25939 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
25941 TRC2(rgSCHCmnNonDlfsCcchSduRbAlloc);
25944 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb,cell);
25946 /* [ccpu00138802]-MOD-If Bw is less than required, return fail
25947 It will be allocated in next TTI */
25949 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
25950 (dlSf->bwAlloced == dlSf->bw))
25952 if((dlSf->bwAlloced == dlSf->bw) ||
25953 (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced)))
25958 /* Retrieve PDCCH */
25959 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
25960 if (ueDl->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX)
25962 /* allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, dlSf, y, ueDl->cqi,
25963 * TFU_DCI_FORMAT_1A, TRUE);*/
25964 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ueCb, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, TRUE);
25968 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ueCb, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE);
25970 if (!(allocInfo->pdcch))
25972 /* Returning RFAILED since PDCCH not available for any CCCH allocations */
25976 /* Update allocation information */
25977 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
25978 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
25979 allocInfo->allocInfo.raType2.isLocal = TRUE;
25981 /*Fix for ccpu00123918*/
25982 /* Push this harq process back to the free queue */
25983 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
25984 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
25985 allocInfo->rbsAlloc = allocInfo->rbsReq;
25986 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
25987 /* Update the sub-frame with new allocation */
25989 /* LTE_ADV_FLAG_REMOVED_START */
25991 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
25993 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf,
25994 allocInfo->allocInfo.raType2.rbStart,
25995 allocInfo->allocInfo.raType2.numRb);
25998 #endif /* end of ifndef LTE_TDD*/
26000 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf,
26001 allocInfo->allocInfo.raType2.rbStart,
26002 allocInfo->allocInfo.raType2.numRb);
26005 /* LTE_ADV_FLAG_REMOVED_END */
26006 /* ccpu00131941 - bwAlloced is updated from SPS bandwidth */
26014 * @brief Performs RB allocation for Msg4 for frequency non-selective cell.
26018 * Function : rgSCHCmnNonDlfsMsg4RbAlloc
26020 * Processing Steps:
26022 * - Allocate consecutively available RBs
26024 * @param[in] RgSchCellCb *cell
26025 * @param[in] RgSchRaCb *raCb
26026 * @param[in] RgSchDlSf *dlSf
26032 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc
26039 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc(cell, raCb, dlSf)
26045 RgSchDlRbAlloc *allocInfo;
26046 TRC2(rgSCHCmnNonDlfsMsg4RbAlloc);
26049 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_RACB(raCb);
26052 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[0]);
26053 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
26055 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
26056 "5GTF_ERROR : vrbg allocated > 25 :ue (%u)",
26058 printf("5GTF_ERROR vrbg allocated > 25\n");
26063 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
26064 (dlSf->bwAlloced == dlSf->bw))
26066 if((dlSf->bwAlloced == dlSf->bw) ||
26067 (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced)))
26074 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
26075 if (raCb->dlHqE->msg4Proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX)
26077 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, raCb->ue, dlSf, raCb->ccchCqi, TFU_DCI_FORMAT_B1, TRUE);
26081 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, raCb->ue, dlSf, raCb->ccchCqi, TFU_DCI_FORMAT_B1, FALSE);
26083 if (!(allocInfo->pdcch))
26085 /* Returning RFAILED since PDCCH not available for any CCCH allocations */
26090 /* SR_RACH_STATS : MSG4 TX Failed */
26091 allocInfo->pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4 = TRUE;
26093 /* Update allocation information */
26094 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
26095 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
26096 allocInfo->allocInfo.raType2.isLocal = TRUE;
26099 /*Fix for ccpu00123918*/
26100 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
26101 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
26102 /* LTE_ADV_FLAG_REMOVED_START */
26104 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
26106 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, \
26107 allocInfo->allocInfo.raType2.rbStart, \
26108 allocInfo->allocInfo.raType2.numRb);
26111 #endif /* end of ifndef LTE_TDD */
26113 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, \
26114 allocInfo->allocInfo.raType2.rbStart, \
26115 allocInfo->allocInfo.raType2.numRb);
26117 /* LTE_ADV_FLAG_REMOVED_END */
26119 allocInfo->rbsAlloc = allocInfo->rbsReq;
26120 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
26124 allocInfo->pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4 = TRUE;
26126 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart = beamInfo->vrbgStart;
26127 allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg = allocInfo->vrbgReq;
26129 /* Update allocation information */
26130 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
26132 allocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange = 1;
26133 allocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
26134 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart, allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg);
26136 allocInfo->tbInfo[0].tbCb->dlGrnt.rbStrt = (allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
26137 allocInfo->tbInfo[0].tbCb->dlGrnt.numRb = (allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
26140 beamInfo->vrbgStart += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
26141 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
26142 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
26150 * @brief Performs RB allocation for Msg4 lists of frequency non-selective cell.
26154 * Function : rgSCHCmnNonDlfsMsg4Alloc
26156 * Processing Steps:
26157 * - For each element in the list, Call rgSCHCmnNonDlfsMsg4RbAlloc().
26158 * - If allocation is successful, add the raCb to scheduled list of MSG4.
26159 * - else, add RaCb to non-scheduled list.
26161 * @param[in] RgSchCellCb *cell
26162 * @param[in, out] RgSchCmnMsg4RbAlloc *allocInfo
26163 * @param[in] U8 isRetx
26168 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc
26171 RgSchCmnMsg4RbAlloc *allocInfo,
26175 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc(cell, allocInfo, isRetx)
26177 RgSchCmnMsg4RbAlloc *allocInfo;
26182 CmLListCp *msg4Lst = NULLP;
26183 CmLListCp *schdMsg4Lst = NULLP;
26184 CmLListCp *nonSchdMsg4Lst = NULLP;
26185 CmLList *schdLnkNode = NULLP;
26186 CmLList *toBeSchdLnk = NULLP;
26187 RgSchDlSf *dlSf = allocInfo->msg4DlSf;
26188 RgSchRaCb *raCb = NULLP;
26189 RgSchDlHqProcCb *hqP = NULLP;
26190 TRC2(rgSCHCmnNonDlfsMsg4Alloc);
26194 /* Initialize re-transmitting lists */
26195 msg4Lst = &(allocInfo->msg4RetxLst);
26196 schdMsg4Lst = &(allocInfo->schdMsg4RetxLst);
26197 nonSchdMsg4Lst = &(allocInfo->nonSchdMsg4RetxLst);
26201 /* Initialize transmitting lists */
26202 msg4Lst = &(allocInfo->msg4TxLst);
26203 schdMsg4Lst = &(allocInfo->schdMsg4TxLst);
26204 nonSchdMsg4Lst = &(allocInfo->nonSchdMsg4TxLst);
26207 /* Perform allocations for the list */
26208 toBeSchdLnk = cmLListFirst(msg4Lst);
26209 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
26211 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26212 raCb = hqP->hqE->raCb;
26213 schdLnkNode = &hqP->schdLstLnk;
26214 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26215 ret = rgSCHCmnNonDlfsMsg4RbAlloc(cell, raCb, dlSf);
26218 /* Allocation failed: Add remaining MSG4 nodes to non-scheduled
26219 * list and return */
26222 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26223 raCb = hqP->hqE->raCb;
26224 schdLnkNode = &hqP->schdLstLnk;
26225 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26226 cmLListAdd2Tail(nonSchdMsg4Lst, schdLnkNode);
26227 toBeSchdLnk = toBeSchdLnk->next;
26228 } while(toBeSchdLnk);
26232 /* Allocation successful: Add UE to the scheduled list */
26233 cmLListAdd2Tail(schdMsg4Lst, schdLnkNode);
26244 * @brief Performs RB allocation for the list of UEs of a frequency
26245 * non-selective cell.
26249 * Function : rgSCHCmnNonDlfsDedRbAlloc
26251 * Processing Steps:
26252 * - For each element in the list, Call rgSCHCmnNonDlfsUeRbAlloc().
26253 * - If allocation is successful, add the ueCb to scheduled list of UEs.
26254 * - else, add ueCb to non-scheduled list of UEs.
26256 * @param[in] RgSchCellCb *cell
26257 * @param[in, out] RgSchCmnUeRbAlloc *allocInfo
26258 * @param[in] CmLListCp *ueLst,
26259 * @param[in, out] CmLListCp *schdHqPLst,
26260 * @param[in, out] CmLListCp *nonSchdHqPLst
26265 PUBLIC Void rgSCHCmnNonDlfsDedRbAlloc
26268 RgSchCmnUeRbAlloc *allocInfo,
26270 CmLListCp *schdHqPLst,
26271 CmLListCp *nonSchdHqPLst
26274 PUBLIC Void rgSCHCmnNonDlfsDedRbAlloc(cell, allocInfo, ueLst,
26275 schdHqPLst, nonSchdHqPLst)
26277 RgSchCmnUeRbAlloc *allocInfo;
26279 CmLListCp *schdHqPLst;
26280 CmLListCp *nonSchdHqPLst;
26284 CmLList *schdLnkNode = NULLP;
26285 CmLList *toBeSchdLnk = NULLP;
26286 RgSchDlSf *dlSf = allocInfo->dedDlSf;
26287 RgSchUeCb *ue = NULLP;
26288 RgSchDlHqProcCb *hqP = NULLP;
26290 TRC2(rgSCHCmnNonDlfsDedRbAlloc);
26293 /* Perform allocations for the list */
26294 toBeSchdLnk = cmLListFirst(ueLst);
26295 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
26297 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26299 schdLnkNode = &hqP->schdLstLnk;
26300 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26302 ret = rgSCHCmnNonDlfsUeRbAlloc(cell, ue, dlSf, &isDlBwAvail);
26305 /* Allocation failed: Add remaining UEs to non-scheduled
26306 * list and return */
26309 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26311 schdLnkNode = &hqP->schdLstLnk;
26312 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26313 cmLListAdd2Tail(nonSchdHqPLst, schdLnkNode);
26314 toBeSchdLnk = toBeSchdLnk->next;
26315 } while(toBeSchdLnk);
26321 #if defined (TENB_STATS) && defined (RG_5GTF)
26322 cell->tenbStats->sch.dl5gtfRbAllocPass++;
26324 /* Allocation successful: Add UE to the scheduled list */
26325 cmLListAdd2Tail(schdHqPLst, schdLnkNode);
26329 #if defined (TENB_STATS) && defined (RG_5GTF)
26330 cell->tenbStats->sch.dl5gtfRbAllocFail++;
26332 /* Allocation failed : Add UE to the non-scheduled list */
26333 printf("5GTF_ERROR Dl rb alloc failed adding nonSchdHqPLst\n");
26334 cmLListAdd2Tail(nonSchdHqPLst, schdLnkNode);
26342 * @brief Handles RB allocation for frequency non-selective cell.
26346 * Function : rgSCHCmnNonDlfsRbAlloc
26348 * Invoking Module Processing:
26349 * - SCH shall invoke this if downlink frequency selective is disabled for
26350 * the cell for RB allocation.
26351 * - MAX C/I/PFS/RR shall provide the requiredBytes, required RBs
26352 * estimate and subframe for each allocation to be made to SCH.
26354 * Processing Steps:
26355 * - Allocate sequentially for common channels.
26356 * - For transmitting and re-transmitting UE list.
26358 * - Perform wide-band allocations for UE in increasing order of
26360 * - Determine Imcs for the allocation.
26361 * - Determine RA type.
26362 * - Determine DCI format.
26364 * @param[in] RgSchCellCb *cell
26365 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
26370 PUBLIC Void rgSCHCmnNonDlfsRbAlloc
26373 RgSchCmnDlRbAllocInfo *allocInfo
26376 PUBLIC Void rgSCHCmnNonDlfsRbAlloc(cell, allocInfo)
26378 RgSchCmnDlRbAllocInfo *allocInfo;
26382 RgSchDlRbAlloc *reqAllocInfo;
26383 TRC2(rgSCHCmnNonDlfsRbAlloc);
26385 /* Allocate for MSG4 retransmissions */
26386 if (allocInfo->msg4Alloc.msg4RetxLst.count)
26388 printf("5GTF_ERROR rgSCHCmnNonDlfsMsg4Alloc RetxLst\n");
26389 rgSCHCmnNonDlfsMsg4Alloc(cell, &(allocInfo->msg4Alloc), TRUE);
26392 /* Allocate for MSG4 transmissions */
26393 /* Assuming all the nodes in the list need allocations: rbsReq is valid */
26394 if (allocInfo->msg4Alloc.msg4TxLst.count)
26396 printf("5GTF_ERROR rgSCHCmnNonDlfsMsg4Alloc txLst\n");
26397 rgSCHCmnNonDlfsMsg4Alloc(cell, &(allocInfo->msg4Alloc), FALSE);
26400 /* Allocate for CCCH SDU (received after guard timer expiry)
26401 * retransmissions */
26402 if (allocInfo->ccchSduAlloc.ccchSduRetxLst.count)
26404 printf("5GTF_ERROR rgSCHCmnNonDlfsCcchSduAlloc\n");
26405 rgSCHCmnNonDlfsCcchSduAlloc(cell, &(allocInfo->ccchSduAlloc), TRUE);
26409 /* Allocate for CCCH SDU (received after guard timer expiry) transmissions */
26410 if (allocInfo->ccchSduAlloc.ccchSduTxLst.count)
26412 printf("5GTF_ERROR rgSCHCmnNonDlfsCcchSduAlloc\n");
26413 rgSCHCmnNonDlfsCcchSduAlloc(cell, &(allocInfo->ccchSduAlloc), FALSE);
26417 /* Allocate for Random access response */
26418 for (raRspCnt = 0; raRspCnt < RG_SCH_CMN_MAX_CMN_PDCCH; ++raRspCnt)
26420 /* Assuming that the requests will be filled in sequentially */
26421 reqAllocInfo = &(allocInfo->raRspAlloc[raRspCnt]);
26422 if (!reqAllocInfo->rbsReq)
26426 printf("5GTF_ERROR calling RAR rgSCHCmnNonDlfsCmnRbAlloc\n");
26427 // if ((rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo)) != ROK)
26428 if ((rgSCHCmnNonDlfsCmnRbAllocRar(cell, reqAllocInfo)) != ROK)
26434 /* Allocate for RETX+TX UEs */
26435 if(allocInfo->dedAlloc.txRetxHqPLst.count)
26437 printf("5GTF_ERROR TX RETX rgSCHCmnNonDlfsDedRbAlloc\n");
26438 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26439 &(allocInfo->dedAlloc.txRetxHqPLst),
26440 &(allocInfo->dedAlloc.schdTxRetxHqPLst),
26441 &(allocInfo->dedAlloc.nonSchdTxRetxHqPLst));
26444 if((allocInfo->dedAlloc.retxHqPLst.count))
26446 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26447 &(allocInfo->dedAlloc.retxHqPLst),
26448 &(allocInfo->dedAlloc.schdRetxHqPLst),
26449 &(allocInfo->dedAlloc.nonSchdRetxHqPLst));
26452 /* Allocate for transmitting UEs */
26453 if((allocInfo->dedAlloc.txHqPLst.count))
26455 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26456 &(allocInfo->dedAlloc.txHqPLst),
26457 &(allocInfo->dedAlloc.schdTxHqPLst),
26458 &(allocInfo->dedAlloc.nonSchdTxHqPLst));
26461 RgSchCmnCell *cmnCell = RG_SCH_CMN_GET_CELL(cell);
26462 if ((allocInfo->dedAlloc.txRetxHqPLst.count +
26463 allocInfo->dedAlloc.retxHqPLst.count +
26464 allocInfo->dedAlloc.txHqPLst.count) >
26465 cmnCell->dl.maxUePerDlSf)
26467 #ifndef ALIGN_64BIT
26468 RGSCHDBGERRNEW(cell->instIdx,(rgSchPBuf(cell->instIdx),"UEs selected by"
26469 " scheduler exceed maximumUePerDlSf(%u)tx-retx %ld retx %ld tx %ld\n",
26470 cmnCell->dl.maxUePerDlSf, allocInfo->dedAlloc.txRetxHqPLst.count,
26471 allocInfo->dedAlloc.retxHqPLst.count,
26472 allocInfo->dedAlloc.txHqPLst.count));
26474 RGSCHDBGERRNEW(cell->instIdx,(rgSchPBuf(cell->instIdx),"UEs selected by"
26475 " scheduler exceed maximumUePerDlSf(%u)tx-retx %d retx %d tx %d\n",
26476 cmnCell->dl.maxUePerDlSf, allocInfo->dedAlloc.txRetxHqPLst.count,
26477 allocInfo->dedAlloc.retxHqPLst.count,
26478 allocInfo->dedAlloc.txHqPLst.count));
26483 /* LTE_ADV_FLAG_REMOVED_START */
26484 if(cell->lteAdvCb.dsfrCfg.status == RGR_ENABLE)
26486 printf("5GTF_ERROR RETX rgSCHCmnNonDlfsDsfrRntpComp\n");
26487 rgSCHCmnNonDlfsDsfrRntpComp(cell, allocInfo->dedAlloc.dedDlSf);
26489 /* LTE_ADV_FLAG_REMOVED_END */
26490 #endif /* LTE_TDD */
26494 /***********************************************************
26496 * Func : rgSCHCmnCalcRiv
26498 * Desc : This function calculates the Resource Indication Value (RIV).
26504 * File : rg_sch_cmn.c
26506 **********************************************************/
26509 PUBLIC U32 rgSCHCmnCalcRiv
26516 PUBLIC U32 rgSCHCmnCalcRiv(bw, rbStart, numRb)
26523 PUBLIC U32 rgSCHCmnCalcRiv
26530 PUBLIC U32 rgSCHCmnCalcRiv(bw, rbStart, numRb)
26537 U8 numRbMinus1 = numRb - 1;
26540 TRC2(rgSCHCmnCalcRiv);
26542 if (numRbMinus1 <= bw/2)
26544 riv = bw * numRbMinus1 + rbStart;
26548 riv = bw * (bw - numRbMinus1) + (bw - rbStart - 1);
26551 } /* rgSCHCmnCalcRiv */
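/* Illustrative worked examples for the RIV mapping above (values are
 * hypothetical, not taken from a live configuration): for a 100-RB cell
 * (bw = 100), an allocation of 20 RBs starting at RB 10 gives
 * numRbMinus1 = 19 <= bw/2, so riv = 100 * 19 + 10 = 1910; an allocation
 * of 60 RBs starting at RB 30 gives numRbMinus1 = 59 > bw/2, so
 * riv = 100 * (100 - 59) + (100 - 30 - 1) = 4169. */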
26555 * @brief This function allocates and copies the RACH response scheduling
26556 * related information into cell control block.
26560 * Function: rgSCHCmnDlCpyRachInfo
26561 * Purpose: This function allocates and copies the RACH response
26562 * scheduling related information into cell control block
26563 * for each DL subframe.
26566 * Invoked by: Scheduler
26568 * @param[in] RgSchCellCb* cell
26569 * @param[in] RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES]
26570 * @param[in] U8 raArrSz
26575 PRIVATE S16 rgSCHCmnDlCpyRachInfo
26578 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES],
26582 PRIVATE S16 rgSCHCmnDlCpyRachInfo(cell, rachRspLst, raArrSz)
26584 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES];
26588 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
26597 TRC2(rgSCHCmnDlCpyRachInfo);
26599 /* Allocate RACH response information for each DL
26600 * subframe in a radio frame */
26601 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cell->rachRspLst,
26602 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1] *
26603 sizeof(RgSchTddRachRspLst));
26609 for(sfnIdx=raArrSz-1; sfnIdx>=0; sfnIdx--)
26611 for(subfrmIdx=0; subfrmIdx < RGSCH_NUM_SUB_FRAMES; subfrmIdx++)
26613 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][subfrmIdx];
26614 if(subfrmIdx == RGSCH_NUM_SUB_FRAMES)
26619 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rachRspLst[sfnIdx],subfrmIdx);
26621 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
26623 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchTddNumDlSubfrmTbl[ulDlCfgIdx],subfrmIdx);
26624 sfNum = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][subfrmIdx]-1;
26625 numRfs = cell->rachRspLst[sfNum].numRadiofrms;
26626 /* Update each DL subframe in which a RACH response
26627 * can be sent */
26630 cell->rachRspLst[sfNum].rachRsp[numRfs].sfnOffset =
26631 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].sfnOffset;
26632 for(sfcount=0; sfcount < numSubfrms; sfcount++)
26634 cell->rachRspLst[sfNum].rachRsp[numRfs].\
26635 subframe[sfcount] =
26636 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].\
26639 cell->rachRspLst[sfNum].rachRsp[numRfs].numSubfrms =
26640 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
26641 cell->rachRspLst[sfNum].numRadiofrms++;
26644 /* Copy the subframes to be deleted at this subframe */
26646 rachRspLst[sfnIdx][subfrmIdx].delInfo.numSubfrms;
26649 cell->rachRspLst[sfNum].delInfo.sfnOffset =
26650 rachRspLst[sfnIdx][subfrmIdx].delInfo.sfnOffset;
26651 for(sfcount=0; sfcount < numSubfrms; sfcount++)
26653 cell->rachRspLst[sfNum].delInfo.subframe[sfcount] =
26654 rachRspLst[sfnIdx][subfrmIdx].delInfo.subframe[sfcount];
26656 cell->rachRspLst[sfNum].delInfo.numSubfrms =
26657 rachRspLst[sfnIdx][subfrmIdx].delInfo.numSubfrms;
26665 * @brief This function determines the iTbs based on the new CFI,
26666 * CQI and BLER based delta iTbs
26670 * Function: rgSchCmnFetchItbs
26671 * Purpose: Fetch the new iTbs when CFI changes.
26673 * @param[in] RgSchCellCb *cell
26674 * @param[in] RgSchCmnDlUe *ueDl
26675 * @param[in] U8 cqi
26682 PRIVATE S32 rgSchCmnFetchItbs
26685 RgSchCmnDlUe *ueDl,
26693 PRIVATE S32 rgSchCmnFetchItbs (cell, ueDl, subFrm, cqi, cfi, cwIdx, noLyr)
26695 RgSchCmnDlUe *ueDl;
26704 PRIVATE S32 rgSchCmnFetchItbs
26707 RgSchCmnDlUe *ueDl,
26714 PRIVATE S32 rgSchCmnFetchItbs (cell, ueDl, cqi, cfi, cwIdx, noLyr)
26716 RgSchCmnDlUe *ueDl;
26725 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
26728 TRC2(rgSchCmnFetchItbs);
26731 /* Special handling for the special subframe when CFI is 3, since
26732 * CFI in a special subframe can be at most 2 */
26733 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
26735 if((cellDl->currCfi == 3) ||
26736 ((cell->bwCfg.dlTotalBw <= 10) && (cellDl->currCfi == 1)))
26738 /* Use CFI 2 in this case */
26739 iTbs = (ueDl->laCb[cwIdx].deltaiTbs +
26740 ((*(RgSchCmnCqiToTbs *)(cellDl->cqiToTbsTbl[0][2]))[cqi])* 100)/100;
26742 RG_SCH_CHK_ITBS_RANGE(iTbs, RGSCH_NUM_ITBS - 1);
26746 iTbs = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[noLyr - 1];
26748 iTbs = RGSCH_MIN(iTbs, cell->thresholds.maxDlItbs);
26750 else /* CFI changed: update with the new iTbs and reset the BLER delta */
26753 S32 tmpiTbs = (*(RgSchCmnCqiToTbs *)(cellDl->cqiToTbsTbl[0][cfi]))[cqi];
26755 iTbs = (ueDl->laCb[cwIdx].deltaiTbs + tmpiTbs*100)/100;
26757 RG_SCH_CHK_ITBS_RANGE(iTbs, tmpiTbs);
26759 iTbs = RGSCH_MIN(iTbs, cell->thresholds.maxDlItbs);
26761 ueDl->mimoInfo.cwInfo[cwIdx].iTbs[noLyr - 1] = iTbs;
26763 ueDl->lastCfi = cfi;
26764 ueDl->laCb[cwIdx].deltaiTbs = 0;
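   /* Illustrative example of the deltaiTbs scaling used in this function
    * (hypothetical numbers): if the CQI-to-iTbs table gives tmpiTbs = 15 and
    * the link adaptation delta is deltaiTbs = -200 (two iTbs steps scaled by
    * 100), then iTbs = (-200 + 15 * 100) / 100 = 13 before range checking
    * and clipping against cell->thresholds.maxDlItbs. */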
26771 * @brief This function determines the RBs and Bytes required for BO
26772 * transmission for UEs configured with TM 1/2/6/7.
26776 * Function: rgSCHCmnDlAllocTxRb1Tb1Cw
26777 * Purpose: Allocate TB1 on CW1.
26779 * Reference Parameter effBo is filled with alloced bytes.
26780 * Returns RFAILED if BO not satisfied at all.
26782 * Invoked by: rgSCHCmnDlAllocTxRbTM1/2/6/7
26784 * @param[in] RgSchCellCb *cell
26785 * @param[in] RgSchDlSf *subFrm
26786 * @param[in] RgSchUeCb *ue
26787 * @param[in] U32 bo
26788 * @param[out] U32 *effBo
26789 * @param[in] RgSchDlHqProcCb *proc
26790 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26795 PRIVATE Void rgSCHCmnDlAllocTxRb1Tb1Cw
26802 RgSchDlHqProcCb *proc,
26803 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26806 PRIVATE Void rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26812 RgSchDlHqProcCb *proc;
26813 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26816 RgSchDlRbAlloc *allocInfo;
26819 TRC2(rgSCHCmnDlAllocTxRb1Tb1Cw);
26822 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
26824 if (ue->ue5gtfCb.rank == 2)
26826 allocInfo->dciFormat = TFU_DCI_FORMAT_B2;
26830 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
26833 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
26834 allocInfo->raType);
26836 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
26837 bo, &numRb, effBo);
26838 if (ret == RFAILED)
26840 /* If allocation couldn't be made then return */
26843 /* Adding UE to RbAllocInfo TX Lst */
26844 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
26845 /* Fill UE alloc Info */
26846 allocInfo->rbsReq = numRb;
26847 allocInfo->dlSf = subFrm;
26849 allocInfo->vrbgReq = numRb/MAX_5GTF_VRBG_SIZE;
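   /* Note (illustrative, assuming MAX_5GTF_VRBG_SIZE is a small constant,
    * e.g. 4): the division above is integer division, so with numRb = 18 the
    * request becomes vrbgReq = 4 and the remainder RBs do not increase it. */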
26857 * @brief This function determines the RBs and Bytes required for BO
26858 * retransmission for UEs configured with TM 1/2/6/7.
26862 * Function: rgSCHCmnDlAllocRetxRb1Tb1Cw
26863 * Purpose: Allocate TB1 on CW1.
26865 * Reference Parameter effBo is filled with alloced bytes.
26866 * Returns RFAILED if BO not satisfied at all.
26868 * Invoked by: rgSCHCmnDlAllocRetxRbTM1/2/6/7
26870 * @param[in] RgSchCellCb *cell
26871 * @param[in] RgSchDlSf *subFrm
26872 * @param[in] RgSchUeCb *ue
26873 * @param[in] U32 bo
26874 * @param[out] U32 *effBo
26875 * @param[in] RgSchDlHqProcCb *proc
26876 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26881 PRIVATE Void rgSCHCmnDlAllocRetxRb1Tb1Cw
26888 RgSchDlHqProcCb *proc,
26889 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26892 PRIVATE Void rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26898 RgSchDlHqProcCb *proc;
26899 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26902 RgSchDlRbAlloc *allocInfo;
26905 TRC2(rgSCHCmnDlAllocRetxRb1Tb1Cw);
26908 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
26911 /* 5GTF: RETX DCI format same as TX */
26912 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
26913 &allocInfo->raType);
26916 /* Get the Allocation in terms of RBs that are required for
26917 * this retx of TB1 */
26918 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, &proc->tbInfo[0],
26920 if (ret == RFAILED)
26922 /* Allocation couldn't be made for Retx */
26923 /* Fix : syed If TxRetx allocation failed then add the UE along with the proc
26924 * to the nonSchdTxRetxUeLst and let spfc scheduler take care of it during
26926 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
26929 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
26930 /* Fill UE alloc Info */
26931 allocInfo->rbsReq = numRb;
26932 allocInfo->dlSf = subFrm;
26934 allocInfo->vrbgReq = numRb/MAX_5GTF_VRBG_SIZE;
26942 * @brief This function determines the RBs and Bytes required for BO
26943 * transmission for UEs configured with TM 1.
26947 * Function: rgSCHCmnDlAllocTxRbTM1
26950 * Reference Parameter effBo is filled with alloced bytes.
26951 * Returns RFAILED if BO not satisfied at all.
26953 * Invoked by: rgSCHCmnDlAllocTxRb
26955 * @param[in] RgSchCellCb *cell
26956 * @param[in] RgSchDlSf *subFrm
26957 * @param[in] RgSchUeCb *ue
26958 * @param[in] U32 bo
26959 * @param[out] U32 *effBo
26960 * @param[in] RgSchDlHqProcCb *proc
26961 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26966 PRIVATE Void rgSCHCmnDlAllocTxRbTM1
26973 RgSchDlHqProcCb *proc,
26974 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26977 PRIVATE Void rgSCHCmnDlAllocTxRbTM1(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26983 RgSchDlHqProcCb *proc;
26984 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26987 TRC2(rgSCHCmnDlAllocTxRbTM1);
26988 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
26994 * @brief This function determines the RBs and Bytes required for BO
26995 * retransmission for UEs configured with TM 1.
26999 * Function: rgSCHCmnDlAllocRetxRbTM1
27002 * Reference Parameter effBo is filled with alloced bytes.
27003 * Returns RFAILED if BO not satisfied at all.
27005 * Invoked by: rgSCHCmnDlAllocRetxRb
27007 * @param[in] RgSchCellCb *cell
27008 * @param[in] RgSchDlSf *subFrm
27009 * @param[in] RgSchUeCb *ue
27010 * @param[in] U32 bo
27011 * @param[out] U32 *effBo
27012 * @param[in] RgSchDlHqProcCb *proc
27013 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27018 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1
27025 RgSchDlHqProcCb *proc,
27026 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27029 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27035 RgSchDlHqProcCb *proc;
27036 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27039 TRC2(rgSCHCmnDlAllocRetxRbTM1);
27040 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27046 * @brief This function determines the RBs and Bytes required for BO
27047 * transmission for UEs configured with TM 2.
27051 * Function: rgSCHCmnDlAllocTxRbTM2
27054 * Reference Parameter effBo is filled with alloced bytes.
27055 * Returns RFAILED if BO not satisfied at all.
27057 * Invoked by: rgSCHCmnDlAllocTxRb
27059 * @param[in] RgSchCellCb *cell
27060 * @param[in] RgSchDlSf *subFrm
27061 * @param[in] RgSchUeCb *ue
27062 * @param[in] U32 bo
27063 * @param[out] U32 *effBo
27064 * @param[in] RgSchDlHqProcCb *proc
27065 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27070 PRIVATE Void rgSCHCmnDlAllocTxRbTM2
27077 RgSchDlHqProcCb *proc,
27078 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27081 PRIVATE Void rgSCHCmnDlAllocTxRbTM2(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27087 RgSchDlHqProcCb *proc;
27088 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27091 TRC2(rgSCHCmnDlAllocTxRbTM2);
27092 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27098 * @brief This function determines the RBs and Bytes required for BO
27099 * retransmission for UEs configured with TM 2.
27103 * Function: rgSCHCmnDlAllocRetxRbTM2
27106 * Reference Parameter effBo is filled with alloced bytes.
27107 * Returns RFAILED if BO not satisfied at all.
27109 * Invoked by: rgSCHCmnDlAllocRetxRb
27111 * @param[in] RgSchCellCb *cell
27112 * @param[in] RgSchDlSf *subFrm
27113 * @param[in] RgSchUeCb *ue
27114 * @param[in] U32 bo
27115 * @param[out] U32 *effBo
27116 * @param[in] RgSchDlHqProcCb *proc
27117 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27122 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2
27129 RgSchDlHqProcCb *proc,
27130 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27133 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27139 RgSchDlHqProcCb *proc;
27140 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27143 TRC2(rgSCHCmnDlAllocRetxRbTM2);
27144 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27150 * @brief This function determines the RBs and Bytes required for BO
27151 * transmission for UEs configured with TM 3.
27155 * Function: rgSCHCmnDlAllocTxRbTM3
27158 * Reference Parameter effBo is filled with alloced bytes.
27159 * Returns RFAILED if BO not satisfied at all.
27161 * Invoked by: rgSCHCmnDlAllocTxRb
27163 * @param[in] RgSchCellCb *cell
27164 * @param[in] RgSchDlSf *subFrm
27165 * @param[in] RgSchUeCb *ue
27166 * @param[in] U32 bo
27167 * @param[out] U32 *effBo
27168 * @param[in] RgSchDlHqProcCb *proc
27169 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27174 PRIVATE Void rgSCHCmnDlAllocTxRbTM3
27181 RgSchDlHqProcCb *proc,
27182 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27185 PRIVATE Void rgSCHCmnDlAllocTxRbTM3(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27191 RgSchDlHqProcCb *proc;
27192 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27196 TRC2(rgSCHCmnDlAllocTxRbTM3);
27198 /* Both TBs free for TX allocation */
27199 rgSCHCmnDlTM3TxTx(cell, subFrm, ue, bo, effBo,\
27200 proc, cellWdAllocInfo);
27207 * @brief This function determines the RBs and Bytes required for BO
27208 * retransmission for UEs configured with TM 3.
27212 * Function: rgSCHCmnDlAllocRetxRbTM3
27215 * Reference Parameter effBo is filled with alloced bytes.
27216 * Returns RFAILED if BO not satisfied at all.
27218 * Invoked by: rgSCHCmnDlAllocRetxRb
27220 * @param[in] RgSchCellCb *cell
27221 * @param[in] RgSchDlSf *subFrm
27222 * @param[in] RgSchUeCb *ue
27223 * @param[in] U32 bo
27224 * @param[out] U32 *effBo
27225 * @param[in] RgSchDlHqProcCb *proc
27226 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27231 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3
27238 RgSchDlHqProcCb *proc,
27239 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27242 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27248 RgSchDlHqProcCb *proc;
27249 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27253 TRC2(rgSCHCmnDlAllocRetxRbTM3);
27255 if ((proc->tbInfo[0].state == HQ_TB_NACKED) &&
27256 (proc->tbInfo[1].state == HQ_TB_NACKED))
27259 printf ("RETX RB TM3 nack for both hqp %d cell %d \n", proc->procId, proc->hqE->cell->cellId);
27261 /* Both TBs require RETX allocation */
27262 rgSCHCmnDlTM3RetxRetx(cell, subFrm, ue, bo, effBo,\
27263 proc, cellWdAllocInfo);
27267 /* One of the TBs needs RETX allocation. The other TB may or may not
27268 * be available for new TX allocation. */
27269 rgSCHCmnDlTM3TxRetx(cell, subFrm, ue, bo, effBo,\
27270 proc, cellWdAllocInfo);
27278 * @brief This function performs the DCI format selection in case of
27279 * Transmit Diversity scheme where there can be more
27280 * than 1 option for DCI format selection.
27284 * Function: rgSCHCmnSlctPdcchFrmt
27285 * Purpose: 1. If DLFS is enabled, then choose TM specific
27286 * DCI format for Transmit diversity. All the
27287 * TM Specific DCI Formats support Type0 and/or
27288 * Type1 resource allocation scheme. DLFS
27289 * supports only Type-0&1 Resource allocation.
27290 * 2. If DLFS is not enabled, select a DCI format
27291 * which is of smaller size. Since Non-DLFS
27292 * scheduler supports all Resource allocation
27293 * schemes, selection is based on efficiency.
27295 * Invoked by: DL UE Allocation by Common Scheduler.
27297 * @param[in] RgSchCellCb *cell
27298 * @param[in] RgSchUeCb *ue
27299 * @param[out] U8 *raType
27300 * @return TfuDciFormat
27304 PUBLIC TfuDciFormat rgSCHCmnSlctPdcchFrmt
27311 PUBLIC TfuDciFormat rgSCHCmnSlctPdcchFrmt(cell, ue, raType)
27317 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
27319 TRC2(rgSCHCmnSlctPdcchFrmt);
27321 /* ccpu00140894- Selective DCI Format and RA type should be selected only
27322 * after TX Mode transition is completed*/
27323 if ((cellSch->dl.isDlFreqSel) && (ue->txModeTransCmplt))
27325 *raType = rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].spfcDciRAType;
27326 RETVALUE(rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].spfcDciFrmt);
27330 *raType = rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].prfrdDciRAType;
27331 RETVALUE(rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].prfrdDciFrmt);
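   /* Illustrative outcome (hypothetical mapping; the actual values come from
    * rgSchCmnDciFrmtOptns): if a UE's TM entry lists, say, a TM-specific
    * format of 2A with RA type 0 and a preferred (smaller) format of 1A with
    * RA type 2, this function returns 2A/type 0 when DLFS is enabled and the
    * TX-mode transition is complete, and 1A/type 2 otherwise. */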
27337 * @brief This function handles Retx allocation in case of TM3 UEs
27338 * where both the TBs were NACKED previously.
27342 * Function: rgSCHCmnDlTM3RetxRetx
27343 * Purpose: If forceTD flag enabled
27344 * TD for TB1 on CW1.
27346 * DCI Frmt 2A and RA Type 0
27347 * RI layered SM of both TBs on 2 CWs
27348 * Add UE to cell Alloc Info.
27349 * Fill UE alloc Info.
27352 * Successful allocation is indicated by non-zero effBo value.
27354 * Invoked by: rgSCHCmnDlAllocRbTM3
27356 * @param[in] RgSchCellCb *cell
27357 * @param[in] RgSchDlSf *subFrm
27358 * @param[in] RgSchUeCb *ue
27359 * @param[in] U32 bo
27360 * @param[out] U32 *effBo
27361 * @param[in] RgSchDlHqProcCb *proc
27362 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27367 PRIVATE Void rgSCHCmnDlTM3RetxRetx
27374 RgSchDlHqProcCb *proc,
27375 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27378 PRIVATE Void rgSCHCmnDlTM3RetxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27384 RgSchDlHqProcCb *proc;
27385 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27389 RgSchDlRbAlloc *allocInfo;
27396 TRC2(rgSCHCmnDlTM3RetxRetx);
27399 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
27401 /* Fix for ccpu00123927: Retransmit 2 codewords irrespective of current rank */
27403 allocInfo->dciFormat = TFU_DCI_FORMAT_2A;
27404 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
27406 ret = rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc, &numRb, &swpFlg,\
27408 if (ret == RFAILED)
27410 /* Allocation couldn't be made for Retx */
27411 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
27414 /* Fix for ccpu00123927: Retransmit 2 codewords irrespective of current rank */
27415 noTxLyrs = proc->tbInfo[0].numLyrs + proc->tbInfo[1].numLyrs;
27416 #ifdef FOUR_TX_ANTENNA
27417 /* Chandra: For 4X4 MIMO RETX with noTxLyrs=3, CW0 should carry the 1-layer TB and CW1 should
27418 * carry the 2-layer TB as per Table 6.3.3.2-1 of 36.211 */
27419 if(noTxLyrs == 3 && proc->tbInfo[0].numLyrs==2)
27422 proc->cwSwpEnabled = TRUE;
27425 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
27426 precInfo = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
27430 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
27433 /* Adding UE to allocInfo RETX Lst */
27434 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
27436 /* Fill UE alloc Info scratch pad */
27437 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
27438 precInfo, noTxLyrs, subFrm);
27445 * @brief This function handles Retx allocation in case of TM4 UEs
27446 * where both the TBs were NACKED previously.
27450 * Function: rgSCHCmnDlTM4RetxRetx
27451 * Purpose: If forceTD flag enabled
27452 * TD for TB1 on CW1.
27454 * DCI Frmt 2 and RA Type 0
27456 * 1 layer SM of TB1 on CW1.
27458 * RI layered SM of both TBs on 2 CWs
27459 * Add UE to cell Alloc Info.
27460 * Fill UE alloc Info.
27463 * Successful allocation is indicated by non-zero effBo value.
27465 * Invoked by: rgSCHCmnDlAllocRbTM4
27467 * @param[in] RgSchCellCb *cell
27468 * @param[in] RgSchDlSf *subFrm
27469 * @param[in] RgSchUeCb *ue
27470 * @param[in] U32 bo
27471 * @param[out] U32 *effBo
27472 * @param[in] RgSchDlHqProcCb *proc
27473 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27478 PRIVATE Void rgSCHCmnDlTM4RetxRetx
27485 RgSchDlHqProcCb *proc,
27486 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27489 PRIVATE Void rgSCHCmnDlTM4RetxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27495 RgSchDlHqProcCb *proc;
27496 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27500 RgSchDlRbAlloc *allocInfo;
27502 Bool swpFlg = FALSE;
27504 #ifdef FOUR_TX_ANTENNA
27509 TRC2(rgSCHCmnDlTM4RetxRetx);
27512 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
27514 /* Irrespective of RI, schedule both CWs */
27515 allocInfo->dciFormat = TFU_DCI_FORMAT_2;
27516 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
27518 ret = rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc, &numRb, &swpFlg,\
27520 if (ret == RFAILED)
27522 /* Allocation couldn't be made for Retx */
27523 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
27526 noTxLyrs = proc->tbInfo[0].numLyrs + proc->tbInfo[1].numLyrs;
27528 #ifdef FOUR_TX_ANTENNA
27529 /* Chandra: For 4X4 MIMO RETX with noTxLyrs=3, CW0 should carry the 1-layer TB and CW1
27530 * should carry the 2-layer TB as per Table 6.3.3.2-1 of 36.211 */
27531 if(noTxLyrs == 3 && proc->tbInfo[0].numLyrs==2)
27534 proc->cwSwpEnabled = TRUE;
27536 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
27537 precInfo = (getPrecInfoFunc[1][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
27541 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
27544 /* Adding UE to allocInfo RETX Lst */
27545 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
27547 /* Fill UE alloc Info scratch pad */
27548 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
27549 precInfo, noTxLyrs, subFrm);
27557 * @brief This function determines Transmission attributes
27558 * in case of spatial multiplexing for TX and RETX TBs.
27562 * Function: rgSCHCmnDlSMGetAttrForTxRetx
27563 * Purpose: 1. Reached here for a TM3/4 UE's HqP whose one of the TBs is
27564 * NACKED and the other TB is either NACKED or WAITING.
27565 * 2. Select the NACKED TB for RETX allocation.
27566 * 3. Allocation preference for RETX TB by mapping it to a better
27567 * CW (better in terms of efficiency).
27568 * 4. Determine the state of the other TB.
27569 * Determine if swapFlag were to be set.
27570 * Swap flag would be set if Retx TB is cross
27572 * 5. If UE has new data available for TX and if the other TB's state
27573 * is ACKED then set furtherScope as TRUE.
27575 * Invoked by: rgSCHCmnDlTM3[4]TxRetx
27577 * @param[in] RgSchUeCb *ue
27578 * @param[in] RgSchDlHqProcCb *proc
27579 * @param[out] RgSchDlHqTbCb **retxTb
27580 * @param[out] RgSchDlHqTbCb **txTb
27581 * @param[out] Bool *frthrScp
27582 * @param[out] Bool *swpFlg
27587 PRIVATE Void rgSCHCmnDlSMGetAttrForTxRetx
27590 RgSchDlHqProcCb *proc,
27591 RgSchDlHqTbCb **retxTb,
27592 RgSchDlHqTbCb **txTb,
27597 PRIVATE Void rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, frthrScp,\
27600 RgSchDlHqProcCb *proc;
27601 RgSchDlHqTbCb **retxTb;
27602 RgSchDlHqTbCb **txTb;
27607 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,proc->hqE->cell);
27608 RgSchDlRbAlloc *allocInfo;
27610 TRC2(rgSCHCmnDlSMGetAttrForTxRetx);
27612 if (proc->tbInfo[0].state == HQ_TB_NACKED)
27614 *retxTb = &proc->tbInfo[0];
27615 *txTb = &proc->tbInfo[1];
27616 /* TENB_BRDCM_TM4- Currently disabling swapflag for TM3/TM4, since
27617 * HqFeedback processing does not consider a swapped hq feedback */
27618 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) && (ueDl->mimoInfo.btrCwIdx == 1))
27621 proc->cwSwpEnabled = TRUE;
27623 if (proc->tbInfo[1].state == HQ_TB_ACKED)
27625 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, proc->hqE->cell);
27626 *frthrScp = allocInfo->mimoAllocInfo.hasNewTxData;
27631 *retxTb = &proc->tbInfo[1];
27632 *txTb = &proc->tbInfo[0];
27633 /* TENB_BRDCM_TM4 - Currently disabling swapflag for TM3/TM4, since
27634 * HqFeedback processing does not consider a swapped hq feedback */
27635 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) && (ueDl->mimoInfo.btrCwIdx == 0))
27638 proc->cwSwpEnabled = TRUE;
27640 if (proc->tbInfo[0].state == HQ_TB_ACKED)
27642 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, proc->hqE->cell);
27643 *frthrScp = allocInfo->mimoAllocInfo.hasNewTxData;
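   /* Illustrative outcome of the selection above (hypothetical TB states):
    * with TB0 NACKED and TB1 ACKED, retxTb points to TB0 and txTb to TB1;
    * frthrScp becomes TRUE only if new TX data is pending, and for a TM4 UE
    * the HARQ process is marked for codeword swap (cwSwpEnabled) only when
    * the better CW index is 1, so the RETX TB can be mapped to the better CW
    * (see the swap-flag note above). */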
27651 * @brief Determine Precoding information for TM3 2 TX Antenna.
27655 * Function: rgSCHCmnDlTM3PrecInf2
27658 * Invoked by: rgSCHCmnDlGetAttrForTM3
27660 * @param[in] RgSchUeCb *ue
27661 * @param[in] U8 numTxLyrs
27662 * @param[in] Bool bothCwEnbld
27667 PRIVATE U8 rgSCHCmnDlTM3PrecInf2
27675 PRIVATE U8 rgSCHCmnDlTM3PrecInf2(ue, numTxLyrs, bothCwEnbld)
27682 TRC2(rgSCHCmnDlTM3PrecInf2);
27689 * @brief Determine Precoding information for TM4 2 TX Antenna.
27693 * Function: rgSCHCmnDlTM4PrecInf2
27694 * Purpose: Derive the precoding index
27695 * information from 36.212 Table 5.3.3.1.5-4
27697 * Invoked by: rgSCHCmnDlGetAttrForTM4
27699 * @param[in] RgSchUeCb *ue
27700 * @param[in] U8 numTxLyrs
27701 * @param[in] Bool bothCwEnbld
27706 PRIVATE U8 rgSCHCmnDlTM4PrecInf2
27714 PRIVATE U8 rgSCHCmnDlTM4PrecInf2(ue, numTxLyrs, bothCwEnbld)
27721 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27724 TRC2(rgSCHCmnDlTM4PrecInf2);
27726 if (ueDl->mimoInfo.ri == numTxLyrs)
27728 if (ueDl->mimoInfo.ri == 2)
27730 /* PrecInfo corresponding to 2 CW
27732 if (ue->mimoInfo.puschFdbkVld)
27738 precIdx = ueDl->mimoInfo.pmi - 1;
27743 /* PrecInfo corresponding to 1 CW
27745 if (ue->mimoInfo.puschFdbkVld)
27751 precIdx = ueDl->mimoInfo.pmi + 1;
27755 else if (ueDl->mimoInfo.ri > numTxLyrs)
27757 /* In case of choosing among the columns of a
27758 * precoding matrix, choose the column corresponding
27759 * to the MAX-CQI */
27760 if (ue->mimoInfo.puschFdbkVld)
27766 precIdx = (ueDl->mimoInfo.pmi- 1)* 2 + 1;
27769 else /* if RI < numTxLyrs */
27771 precIdx = (ueDl->mimoInfo.pmi < 2)? 0:1;
27778 * @brief Determine Precoding information for TM3 4 TX Antenna.
27782 * Function: rgSCHCmnDlTM3PrecInf4
27783 * Purpose: Derive the precoding index
27784 * information from 36.212 Table 5.3.3.1.5A-2
27786 * Invoked by: rgSCHCmnDlGetAttrForTM3
27788 * @param[in] RgSchUeCb *ue
27789 * @param[in] U8 numTxLyrs
27790 * @param[in] Bool bothCwEnbld
27795 PRIVATE U8 rgSCHCmnDlTM3PrecInf4
27803 PRIVATE U8 rgSCHCmnDlTM3PrecInf4(ue, numTxLyrs, bothCwEnbld)
27812 TRC2(rgSCHCmnDlTM3PrecInf4);
27816 precIdx = numTxLyrs - 2;
27818 else /* one 1 CW transmission */
27827 * @brief Determine Precoding information for TM4 4 TX Antenna.
27831 * Function: rgSCHCmnDlTM4PrecInf4
27832 * Purpose: Derive the precoding index
27833 * information from 36.212 Table 5.3.3.1.5-5
27835 * Invoked by: rgSCHCmnDlGetAttrForTM4
27837 * @param[in] RgSchUeCb *ue
27838 * @param[in] U8 numTxLyrs
27839 * @param[in] Bool bothCwEnbld
27844 PRIVATE U8 rgSCHCmnDlTM4PrecInf4
27852 PRIVATE U8 rgSCHCmnDlTM4PrecInf4(cell, ue, numTxLyrs, bothCwEnbld)
27859 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27860 U8 precInfoBaseIdx, precIdx;
27862 TRC2(rgSCHCmnDlTM4PrecInf4);
27864 precInfoBaseIdx = (ue->mimoInfo.puschFdbkVld)? (16):
27865 (ueDl->mimoInfo.pmi);
27868 precIdx = precInfoBaseIdx + (numTxLyrs-2)*17;
27870 else /* one 1 CW transmission */
27872 precInfoBaseIdx += 1;
27873 precIdx = precInfoBaseIdx + (numTxLyrs-1)*17;
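   /* Illustrative example of the index mapping above (hypothetical values):
    * with no valid PUSCH feedback and a reported PMI of 3, a two-codeword
    * allocation over 2 layers gives precIdx = 3 + (2 - 2) * 17 = 3, while a
    * single-codeword allocation over 1 layer gives
    * precIdx = (3 + 1) + (1 - 1) * 17 = 4, following the 17-entry blocks of
    * 36.212 Table 5.3.3.1.5-5. */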
27880 * @brief This function determines Transmission attributes
27881 * in case of TM3 scheduling.
27885 * Function: rgSCHCmnDlGetAttrForTM3
27886 * Purpose: Determine retx TB and tx TB based on TB states.
27887 * If forceTD enabled
27888 * perform only retx TB allocation.
27889 * If retxTB == TB2 then DCI Frmt = 2A, RA Type = 0.
27890 * Else DCI Frmt and RA Type based on cell->isDlfsEnbld
27892 * perform retxTB allocation on CW1.
27894 * Determine further Scope and Swap Flag attributes
27895 * assuming a 2 CW transmission of RetxTB and new Tx TB.
27896 * If no further scope for new TX allocation
27897 * Allocate only retx TB using 2 layers if
27898 * this TB was previously transmitted using 2 layers AND
27899 * number of Tx antenna ports == 4.
27900 * otherwise do single layer precoding.
27902 * Invoked by: rgSCHCmnDlTM3TxRetx
27904 * @param[in] RgSchUeCb *ue
27905 * @param[in] RgSchDlHqProcCb *proc
27906 * @param[out] U8 *numTxLyrs
27907 * @param[out] Bool *isTraDiv
27908 * @param[out] U8 *prcdngInf
27909 * @param[out] U8 *raType
27914 PRIVATE Void rgSCHCmnDlGetAttrForTM3
27918 RgSchDlHqProcCb *proc,
27920 TfuDciFormat *dciFrmt,
27922 RgSchDlHqTbCb **retxTb,
27923 RgSchDlHqTbCb **txTb,
27929 PRIVATE Void rgSCHCmnDlGetAttrForTM3(cell, ue, proc, numTxLyrs, dciFrmt,\
27930 prcdngInf, retxTb, txTb, frthrScp, swpFlg, raType)
27933 RgSchDlHqProcCb *proc;
27935 TfuDciFormat *dciFrmt;
27937 RgSchDlHqTbCb **retxTb;
27938 RgSchDlHqTbCb **txTb;
27944 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27947 TRC2(rgSCHCmnDlGetAttrForTM3);
27949 /* Avoiding Tx-Retx for LAA cell as firstSchedTime is associated with
27951 /* Integration_fix: SPS Proc shall always have only one Cw */
27953 if (((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
27954 (ueDl->mimoInfo.forceTD))
27956 ||(TRUE == rgSCHLaaSCellEnabled(cell))
27960 if ((ueDl->mimoInfo.forceTD)
27962 || (TRUE == rgSCHLaaSCellEnabled(cell))
27967 /* Transmit Diversity. Format based on dlfsEnabled
27968 * No further scope */
27969 if (proc->tbInfo[0].state == HQ_TB_NACKED)
27971 *retxTb = &proc->tbInfo[0];
27972 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
27976 *retxTb = &proc->tbInfo[1];
27977 *dciFrmt = TFU_DCI_FORMAT_2A;
27978 *raType = RG_SCH_CMN_RA_TYPE0;
27986 /* Determine the 2 TB transmission attributes */
27987 rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, \
27991 /* Prefer allocation of RETX TB over 2 layers rather than combining
27992 * it with a new TX. */
27993 if ((ueDl->mimoInfo.ri == 2)
27994 && ((*retxTb)->numLyrs == 2) && (cell->numTxAntPorts == 4))
27996 /* Allocate TB on CW1, using 2 Lyrs,
27997 * Format 2A, precoding accordingly */
28003 *numTxLyrs= ((*retxTb)->numLyrs + ueDl->mimoInfo.cwInfo[!(ueDl->mimoInfo.btrCwIdx)].noLyr);
28005 if((*retxTb)->tbIdx == 0 && ((*retxTb)->numLyrs == 2 ) && *numTxLyrs ==3)
28008 proc->cwSwpEnabled = TRUE;
28010 else if((*retxTb)->tbIdx == 1 && ((*retxTb)->numLyrs == 1) && *numTxLyrs ==3)
28013 proc->cwSwpEnabled = TRUE;
28017 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28018 *prcdngInf = (getPrecInfoFunc[0][precInfoAntIdx])\
28019 (cell, ue, ueDl->mimoInfo.ri, *frthrScp);
28020 *dciFrmt = TFU_DCI_FORMAT_2A;
28021 *raType = RG_SCH_CMN_RA_TYPE0;
28023 else /* frthrScp == FALSE */
28025 if (cell->numTxAntPorts == 2)
28027 /* Transmit Diversity */
28029 if ((*retxTb)->tbIdx == 0)
28031 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28035 /* If retxTB is TB2 then use format 2A */
28036 *dciFrmt = TFU_DCI_FORMAT_2A;
28037 *raType = RG_SCH_CMN_RA_TYPE0;
28042 else /* NumAntPorts == 4 */
28044 if ((*retxTb)->numLyrs == 2)
28046 /* Allocate TB on CW1, using 2 Lyrs,
28047 * Format 2A, precoding accordingly */
28049 *dciFrmt = TFU_DCI_FORMAT_2A;
28050 *raType = RG_SCH_CMN_RA_TYPE0;
28051 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28052 *prcdngInf = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, *numTxLyrs, *frthrScp);
28057 /* Transmit Diversity */
28059 if ((*retxTb)->tbIdx == 0)
28061 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28065 /* If retxTB is TB2 then use format 2A */
28066 *dciFrmt = TFU_DCI_FORMAT_2A;
28067 *raType = RG_SCH_CMN_RA_TYPE0;
28081 * @brief This function determines Transmission attributes
28082 * in case of TM4 scheduling.
28086 * Function: rgSCHCmnDlGetAttrForTM4
28087 * Purpose: Determine retx TB and tx TB based on TB states.
28088 * If forceTD enabled
28089 * perform only retx TB allocation.
28090 * If retxTB == TB2 then DCI Frmt = 2, RA Type = 0.
28091 * Else DCI Frmt and RA Type based on cell->isDlfsEnbld
28093 * perform retxTB allocation on CW1.
28095 * Determine further Scope and Swap Flag attributes
28096 * assuming a 2 CW transmission of RetxTB and new Tx TB.
28097 * If no further scope for new TX allocation
28098 * Allocate only retx TB using 2 layers if
28099 * this TB was previously transmitted using 2 layers AND
28100 * number of Tx antenna ports == 4.
28101 * otherwise do single layer precoding.
28103 * Invoked by: rgSCHCmnDlTM4TxRetx
28105 * @param[in] RgSchUeCb *ue
28106 * @param[in] RgSchDlHqProcCb *proc
28107 * @param[out] U8 *numTxLyrs
28108 * @param[out] Bool *isTraDiv
28109 * @param[out] U8 *prcdngInf
28110 * @param[out] U8 *raType
28115 PRIVATE Void rgSCHCmnDlGetAttrForTM4
28119 RgSchDlHqProcCb *proc,
28121 TfuDciFormat *dciFrmt,
28123 RgSchDlHqTbCb **retxTb,
28124 RgSchDlHqTbCb **txTb,
28130 PRIVATE Void rgSCHCmnDlGetAttrForTM4(cell, ue, proc, numTxLyrs, dciFrmt,\
28131 prcdngInf, retxTb, txTb, frthrScp, swpFlg, raType)
28134 RgSchDlHqProcCb *proc;
28136 TfuDciFormat *dciFrmt;
28138 RgSchDlHqTbCb **retxTb;
28139 RgSchDlHqTbCb **txTb;
28145 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28148 TRC2(rgSCHCmnDlGetAttrForTM4);
28151 /* Integration_fix: SPS Proc shall always have only one Cw */
28153 if (((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28154 (ueDl->mimoInfo.forceTD))
28156 ||(TRUE == rgSCHLaaSCellEnabled(cell))
28160 if ((ueDl->mimoInfo.forceTD)
28162 || (TRUE == rgSCHLaaSCellEnabled(cell))
28167 /* Transmit Diversity. Format based on dlfsEnabled
28168 * No further scope */
28169 if (proc->tbInfo[0].state == HQ_TB_NACKED)
28171 *retxTb = &proc->tbInfo[0];
28172 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28176 *retxTb = &proc->tbInfo[1];
28177 *dciFrmt = TFU_DCI_FORMAT_2;
28178 *raType = RG_SCH_CMN_RA_TYPE0;
28186 if (ueDl->mimoInfo.ri == 1)
28188 /* single layer precoding. Format 2.
28189 * No further scope */
28190 if (proc->tbInfo[0].state == HQ_TB_NACKED)
28192 *retxTb = &proc->tbInfo[0];
28196 *retxTb = &proc->tbInfo[1];
28199 *dciFrmt = TFU_DCI_FORMAT_2;
28200 *raType = RG_SCH_CMN_RA_TYPE0;
28202 *prcdngInf = 0; /*When RI= 1*/
28206 /* Determine the 2 TB transmission attributes */
28207 rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, \
28209 *dciFrmt = TFU_DCI_FORMAT_2;
28210 *raType = RG_SCH_CMN_RA_TYPE0;
28213 /* Prefer allocation of RETX TB over 2 layers rather than combining
28214 * it with a new TX. */
28215 if ((ueDl->mimoInfo.ri == 2)
28216 && ((*retxTb)->numLyrs == 2) && (cell->numTxAntPorts == 4))
28218 /* Allocate TB on CW1, using 2 Lyrs,
28219 * Format 2, precoding accordingly */
28223 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28224 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])
28225 (cell, ue, ueDl->mimoInfo.ri, *frthrScp);
28227 else /* frthrScp == FALSE */
28229 if (cell->numTxAntPorts == 2)
28231 /* single layer precoding. Format 2. */
28233 *prcdngInf = (getPrecInfoFunc[1][cell->numTxAntPorts/2 - 1])\
28234 (cell, ue, *numTxLyrs, *frthrScp);
28237 else /* NumAntPorts == 4 */
28239 if ((*retxTb)->numLyrs == 2)
28241 /* Allocate TB on CW1, using 2 Lyrs,
28242 * Format 2, precoding accordingly */
28244 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28245 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])\
28246 (cell, ue, *numTxLyrs, *frthrScp);
28251 /* Allocate TB with 1 lyr precoding,
28252 * Format 2, precoding info accordingly */
28254 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28255 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])\
28256 (cell, ue, *numTxLyrs, *frthrScp);
28267 * @brief This function handles Retx allocation in case of TM3 UEs
28268 * where previously one of the TBs was NACKED and the other
28269 * TB is either ACKED/WAITING.
28273 * Function: rgSCHCmnDlTM3TxRetx
28274 * Purpose: Determine the TX attributes for TM3 TxRetx Allocation.
28275 * If further Scope for New Tx Allocation on other TB
28276 * Perform RETX alloc'n on 1 CW and TX alloc'n on other.
28277 * Add UE to cell wide RetxTx List.
28279 * Perform only RETX alloc'n on CW1.
28280 * Add UE to cell wide Retx List.
28282 * effBo is set to a non-zero value if allocation is
28285 * Invoked by: rgSCHCmnDlAllocRbTM3
28287 * @param[in] RgSchCellCb *cell
28288 * @param[in] RgSchDlSf *subFrm
28289 * @param[in] RgSchUeCb *ue
28290 * @param[in] U32 bo
28291 * @param[out] U32 *effBo
28292 * @param[in] RgSchDlHqProcCb *proc
28293 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28298 PRIVATE Void rgSCHCmnDlTM3TxRetx
28305 RgSchDlHqProcCb *proc,
28306 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28309 PRIVATE Void rgSCHCmnDlTM3TxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28315 RgSchDlHqProcCb *proc;
28316 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28320 RgSchDlRbAlloc *allocInfo;
28322 RgSchDlHqTbCb *retxTb, *txTb;
28328 TRC2(rgSCHCmnDlTM3TxRetx);
28332 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28335 /* Determine the transmission attributes */
28336 rgSCHCmnDlGetAttrForTM3(cell, ue, proc, &numTxLyrs, &allocInfo->dciFormat,\
28337 &prcdngInf, &retxTb, &txTb, &frthrScp, &swpFlg,\
28338 &allocInfo->raType);
28343 printf ("TX RETX called from proc %d cell %d \n",proc->procId, cell->cellId);
28345 ret = rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, retxTb, txTb,\
28347 if (ret == RFAILED)
28349 /* Allocation couldn't be made for Retx */
28350 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28353 /* Adding UE to RbAllocInfo RETX-TX Lst */
28354 rgSCHCmnDlRbInfoAddUeRetxTx(cell, cellWdAllocInfo, ue, proc);
28358 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, retxTb,
28359 numTxLyrs, &numRb, effBo);
28360 if (ret == RFAILED)
28362 /* Allocation couldn't be made for Retx */
28363 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28367 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28370 /* Adding UE to allocInfo RETX Lst */
28371 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
28374 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
28375 prcdngInf, numTxLyrs, subFrm);
28382 * @brief This function handles Retx allocation in case of TM4 UEs
28383 * where previously one of the TBs was NACKED and the other
28384 * TB is either ACKED/WAITING.
28388 * Function: rgSCHCmnDlTM4TxRetx
28389 * Purpose: Determine the TX attributes for TM4 TxRetx Allocation.
28390 * If further Scope for New Tx Allocation on other TB
28391 * Perform RETX alloc'n on 1 CW and TX alloc'n on other.
28392 * Add UE to cell wide RetxTx List.
28394 * Perform only RETX alloc'n on CW1.
28395 * Add UE to cell wide Retx List.
28397 * effBo is set to a non-zero value if allocation is
28400 * Invoked by: rgSCHCmnDlAllocRbTM4
28402 * @param[in] RgSchCellCb *cell
28403 * @param[in] RgSchDlSf *subFrm
28404 * @param[in] RgSchUeCb *ue
28405 * @param[in] U32 bo
28406 * @param[out] U32 *effBo
28407 * @param[in] RgSchDlHqProcCb *proc
28408 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28413 PRIVATE Void rgSCHCmnDlTM4TxRetx
28420 RgSchDlHqProcCb *proc,
28421 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28424 PRIVATE Void rgSCHCmnDlTM4TxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28430 RgSchDlHqProcCb *proc;
28431 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28435 RgSchDlRbAlloc *allocInfo;
28437 RgSchDlHqTbCb *retxTb, *txTb;
28443 TRC2(rgSCHCmnDlTM4TxRetx);
28446 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28449 /* Determine the transmission attributes */
28450 rgSCHCmnDlGetAttrForTM4(cell, ue, proc, &numTxLyrs, &allocInfo->dciFormat,\
28451 &prcdngInf, &retxTb, &txTb, &frthrScp, &swpFlg,\
28452 &allocInfo->raType);
28456 ret = rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, retxTb, txTb,\
28458 if (ret == RFAILED)
28460 /* Fix : syed If TxRetx allocation failed then add the UE along
28461 * with the proc to the nonSchdTxRetxUeLst and let spfc scheduler
28462 * take care of it during finalization. */
28463 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28466 /* Adding UE to RbAllocInfo RETX-TX Lst */
28467 rgSCHCmnDlRbInfoAddUeRetxTx(cell, cellWdAllocInfo, ue, proc);
28471 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, retxTb,
28472 numTxLyrs, &numRb, effBo);
28473 if (ret == RFAILED)
28475 /* Allocation couldn't be made for Retx */
28476 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28480 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28483 /* Adding UE to allocInfo RETX Lst */
28484 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
28487 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
28488 prcdngInf, numTxLyrs, subFrm)
28495 * @brief This function handles fresh TX allocation in case of TM3 UEs
28496 * where both the TBs are free for new transmission (e.g. previously ACKED).
28501 * Function: rgSCHCmnDlTM3TxTx
28502 * Purpose: Reached here for a TM3 UE's HqP's fresh allocation
28503 * where both the TBs are free for TX scheduling.
28504 * If forceTD flag is set
28505 * perform TD on CW1 with TB1.
28510 * RI layered precoding 2 TB on 2 CW.
28511 * Set precoding info.
28512 * Add UE to cellAllocInfo.
28513 * Fill ueAllocInfo.
28515 * effBo is set to a non-zero value if allocation is
28518 * Invoked by: rgSCHCmnDlAllocRbTM3
28520 * @param[in] RgSchCellCb *cell
28521 * @param[in] RgSchDlSf *subFrm
28522 * @param[in] RgSchUeCb *ue
28523 * @param[in] U32 bo
28524 * @param[out] U32 *effBo
28525 * @param[in] RgSchDlHqProcCb *proc
28526 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28531 PRIVATE Void rgSCHCmnDlTM3TxTx
28538 RgSchDlHqProcCb *proc,
28539 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28542 PRIVATE Void rgSCHCmnDlTM3TxTx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28548 RgSchDlHqProcCb *proc;
28549 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28552 RgSchCmnDlUe *ueDl;
28553 RgSchDlRbAlloc *allocInfo;
28560 TRC2(rgSCHCmnDlTM3TxTx);
28563 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28564 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28566 /* Integration_fix: SPS Proc shall always have only one Cw */
28568 #ifdef FOUR_TX_ANTENNA
28569 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28570 (ueDl->mimoInfo.forceTD) || proc->hasDcch) /* Chandra: avoid spatial multiplexing for DCCH */
28572 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28573 (ueDl->mimoInfo.forceTD))
28576 if (ueDl->mimoInfo.forceTD) /* Transmit Diversity (TD) */
28579 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
28580 &allocInfo->raType);
28581 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28582 bo, &numRb, effBo);
28583 if (ret == RFAILED)
28585 /* If allocation couldn't be made then return */
28589 precInfo = 0; /* TD */
28591 else /* Precoding */
28593 allocInfo->dciFormat = TFU_DCI_FORMAT_2A;
28594 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
28596 /* Spatial Multiplexing using 2 CWs */
28597 ret = rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, &numRb, effBo);
28598 if (ret == RFAILED)
28600 /* If allocation couldn't be made then return */
28603 noTxLyrs = ueDl->mimoInfo.ri;
28604 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28605 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, getPrecInfoFunc[0], precInfoAntIdx);
28606 precInfo = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
28610 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28613 /* Adding UE to RbAllocInfo TX Lst */
28614 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
28616 /* Fill UE allocInfo scratch pad */
28617 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, FALSE, \
28618 precInfo, noTxLyrs, subFrm);
28625 * @brief This function handles fresh TX allocation in case of TM4 UEs
28626 * where both the TBs are free for new transmission (e.g. previously ACKED).
28631 * Function: rgSCHCmnDlTM4TxTx
28632 * Purpose: Reached here for a TM4 UE's HqP's fresh allocation
28633 * where both the TBs are free for TX scheduling.
28634 * If forceTD flag is set
28635 * perform TD on CW1 with TB1.
28641 * Single layer precoding of TB1 on CW1.
28642 * Set precoding info.
28644 * RI layered precoding 2 TB on 2 CW.
28645 * Set precoding info.
28646 * Add UE to cellAllocInfo.
28647 * Fill ueAllocInfo.
28649 * effBo is set to a non-zero value if allocation is
28652 * Invoked by: rgSCHCmnDlAllocRbTM4
28654 * @param[in] RgSchCellCb *cell
28655 * @param[in] RgSchDlSf *subFrm
28656 * @param[in] RgSchUeCb *ue
28657 * @param[in] U32 bo
28658 * @param[out] U32 *effBo
28659 * @param[in] RgSchDlHqProcCb *proc
28660 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28665 PRIVATE Void rgSCHCmnDlTM4TxTx
28672 RgSchDlHqProcCb *proc,
28673 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28676 PRIVATE Void rgSCHCmnDlTM4TxTx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28682 RgSchDlHqProcCb *proc;
28683 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28686 RgSchCmnDlUe *ueDl;
28687 RgSchDlRbAlloc *allocInfo;
28694 TRC2(rgSCHCmnDlTM4TxTx);
28697 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28698 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28700 /* Integration_fix: SPS Proc shall always have only one Cw */
28702 #ifdef FOUR_TX_ANTENNA
28703 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28704 (ueDl->mimoInfo.forceTD) || proc->hasDcch) /* Chandra: avoid spatial multiplexing for DCCH */
28706 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28707 (ueDl->mimoInfo.forceTD))
28710 if (ueDl->mimoInfo.forceTD) /* Transmit Diversity (TD) */
28713 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
28714 &allocInfo->raType);
28716 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28717 bo, &numRb, effBo);
28718 if (ret == RFAILED)
28720 /* If allocation couldn't be made then return */
28724 precInfo = 0; /* TD */
28726 else /* Precoding */
28728 allocInfo->dciFormat = TFU_DCI_FORMAT_2;
28729 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
28731 if (ueDl->mimoInfo.ri == 1)
28733 /* Single Layer SM using FORMAT 2 */
28734 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28735 bo, &numRb, effBo);
28736 if (ret == RFAILED)
28738 /* If allocation couldn't be made then return */
28742 precInfo = 0; /* PrecInfo as 0 for RI=1*/
28746 /* Spatial Multiplexing using 2 CWs */
28747 ret = rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, &numRb, effBo);
28748 if (ret == RFAILED)
28750 /* If allocation couldn't be made then return */
28753 noTxLyrs = ueDl->mimoInfo.ri;
28754 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28755 precInfo = (getPrecInfoFunc[1][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
28761 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28764 /* Adding UE to RbAllocInfo TX Lst */
28765 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
28768 /* Fill UE allocInfo scratch pad */
28769 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, FALSE, \
28770 precInfo, noTxLyrs, subFrm);
28777 * @brief This function determines the RBs and Bytes required for BO
28778 * transmission for UEs configured with TM 4.
28782 * Function: rgSCHCmnDlAllocTxRbTM4
28783 * Purpose: Invokes the functionality particular to the
28784 * current state of the TBs of the "proc".
28786 * Reference Parameter effBo is filled with alloced bytes.
28787 * Returns RFAILED if BO not satisfied at all.
28789 * Invoked by: rgSCHCmnDlAllocTxRb
28791 * @param[in] RgSchCellCb *cell
28792 * @param[in] RgSchDlSf *subFrm
28793 * @param[in] RgSchUeCb *ue
28794 * @param[in] U32 bo
28795 * @param[out] U32 *effBo
28796 * @param[in] RgSchDlHqProcCb *proc
28797 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28802 PRIVATE Void rgSCHCmnDlAllocTxRbTM4
28809 RgSchDlHqProcCb *proc,
28810 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28813 PRIVATE Void rgSCHCmnDlAllocTxRbTM4(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28819 RgSchDlHqProcCb *proc;
28820 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28823 TRC2(rgSCHCmnDlAllocTxRbTM4);
28825 /* Both TBs free for TX allocation */
28826 rgSCHCmnDlTM4TxTx(cell, subFrm, ue, bo, effBo,\
28827 proc, cellWdAllocInfo);
28834 * @brief This function determines the RBs and Bytes required for BO
28835 * retransmission for UEs configured with TM 4.
28839 * Function: rgSCHCmnDlAllocRetxRbTM4
28840 * Purpose: Invokes the functionality particular to the
28841 * current state of the TBs of the "proc".
28843 * Reference Parameter effBo is filled with alloced bytes.
28844 * Returns RFAILED if BO not satisfied at all.
28846 * Invoked by: rgSCHCmnDlAllocRetxRb
28848 * @param[in] RgSchCellCb *cell
28849 * @param[in] RgSchDlSf *subFrm
28850 * @param[in] RgSchUeCb *ue
28851 * @param[in] U32 bo
28852 * @param[out] U32 *effBo
28853 * @param[in] RgSchDlHqProcCb *proc
28854 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28859 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4
28866 RgSchDlHqProcCb *proc,
28867 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28870 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28876 RgSchDlHqProcCb *proc;
28877 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28880 TRC2(rgSCHCmnDlAllocRetxRbTM4);
28882 if ((proc->tbInfo[0].state == HQ_TB_NACKED) &&
28883 (proc->tbInfo[1].state == HQ_TB_NACKED))
28885 /* Both TBs require RETX allocation */
28886 rgSCHCmnDlTM4RetxRetx(cell, subFrm, ue, bo, effBo,\
28887 proc, cellWdAllocInfo);
28891 /* One of the TBs needs RETX allocation. The other TB may or may not
28892 * be available for new TX allocation. */
28893 rgSCHCmnDlTM4TxRetx(cell, subFrm, ue, bo, effBo,\
28894 proc, cellWdAllocInfo);
28903 * @brief This function determines the RBs and Bytes required for BO
28904 * transmission for UEs configured with TM 5.
28908 * Function: rgSCHCmnDlAllocTxRbTM5
28911 * Reference Parameter effBo is filled with alloced bytes.
28912 * Returns RFAILED if BO not satisfied at all.
28914 * Invoked by: rgSCHCmnDlAllocTxRb
28916 * @param[in] RgSchCellCb *cell
28917 * @param[in] RgSchDlSf *subFrm
28918 * @param[in] RgSchUeCb *ue
28919 * @param[in] U32 bo
28920 * @param[out] U32 *effBo
28921 * @param[in] RgSchDlHqProcCb *proc
28922 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28927 PRIVATE Void rgSCHCmnDlAllocTxRbTM5
28934 RgSchDlHqProcCb *proc,
28935 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28938 PRIVATE Void rgSCHCmnDlAllocTxRbTM5(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28944 RgSchDlHqProcCb *proc;
28945 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28948 TRC2(rgSCHCmnDlAllocTxRbTM5);
28949 #if (ERRCLASS & ERRCLS_DEBUG)
28950 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Invalid TM 5 for CRNTI:%d",ue->ueId);
28957 * @brief This function determines the RBs and Bytes required for BO
28958 * retransmission for UEs configured with TM 5.
28962 * Function: rgSCHCmnDlAllocRetxRbTM5
28965 * Reference Parameter effBo is filled with alloced bytes.
28966 * Returns RFAILED if BO not satisfied at all.
28968 * Invoked by: rgSCHCmnDlAllocRetxRb
28970 * @param[in] RgSchCellCb *cell
28971 * @param[in] RgSchDlSf *subFrm
28972 * @param[in] RgSchUeCb *ue
28973 * @param[in] U32 bo
28974 * @param[out] U32 *effBo
28975 * @param[in] RgSchDlHqProcCb *proc
28976 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28981 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5
28988 RgSchDlHqProcCb *proc,
28989 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28992 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28998 RgSchDlHqProcCb *proc;
28999 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29002 TRC2(rgSCHCmnDlAllocRetxRbTM5);
29003 #if (ERRCLASS & ERRCLS_DEBUG)
29004 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Invalid TM 5 for CRNTI:%d",ue->ueId);
29012 * @brief This function determines the RBs and Bytes required for BO
29013 * transmission for UEs configured with TM 6.
29017 * Function: rgSCHCmnDlAllocTxRbTM6
29020 * Reference Parameter effBo is filled with alloced bytes.
29021 * Returns RFAILED if BO not satisfied at all.
29023 * Invoked by: rgSCHCmnDlAllocTxRb
29025 * @param[in] RgSchCellCb *cell
29026 * @param[in] RgSchDlSf *subFrm
29027 * @param[in] RgSchUeCb *ue
29028 * @param[in] U32 bo
29029 * @param[out] U32 *effBo
29030 * @param[in] RgSchDlHqProcCb *proc
29031 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29036 PRIVATE Void rgSCHCmnDlAllocTxRbTM6
29043 RgSchDlHqProcCb *proc,
29044 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29047 PRIVATE Void rgSCHCmnDlAllocTxRbTM6(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29053 RgSchDlHqProcCb *proc;
29054 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29057 RgSchDlRbAlloc *allocInfo;
29058 RgSchCmnDlUe *ueDl;
29062 TRC2(rgSCHCmnDlAllocTxRbTM6);
29065 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29066 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29068 if (ueDl->mimoInfo.forceTD)
29070 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
29071 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29075 allocInfo->dciFormat = TFU_DCI_FORMAT_1B;
29076 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29077 /* Fill precoding information for FORMAT 1B */
29078 /* The 4 least significant bits indicate the PMI.
29079 * The next bit (bit 4) carries the PMI confirmation.
29081 allocInfo->mimoAllocInfo.precIdxInfo |= ue->mimoInfo.puschFdbkVld << 4;
29082 allocInfo->mimoAllocInfo.precIdxInfo |= ueDl->mimoInfo.pmi;
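/* Illustrative example (hypothetical values): with pmi = 0x5 and
 * puschFdbkVld = 1, precIdxInfo becomes (1 << 4) | 0x5 = 0x15, i.e.
 * bits [3:0] carry the PMI and bit 4 carries the PMI confirmation,
 * matching the two OR operations above. */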
29084 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
29085 bo, &numRb, effBo);
29086 if (ret == RFAILED)
29088 /* If allocation couldn't be made then return */
29093 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
29096 /* Adding UE to RbAllocInfo TX Lst */
29097 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
29099 /* Fill UE alloc Info */
29100 allocInfo->rbsReq = numRb;
29101 allocInfo->dlSf = subFrm;
29107 * @brief This function determines the RBs and Bytes required for BO
29108 * retransmission for UEs configured with TM 6.
29112 * Function: rgSCHCmnDlAllocRetxRbTM6
29116 * Reference Parameter effBo is filled with allocated bytes.
29116 * Returns RFAILED if BO not satisfied at all.
29118 * Invoked by: rgSCHCmnDlAllocRetxRb
29120 * @param[in] RgSchCellCb *cell
29121 * @param[in] RgSchDlSf *subFrm
29122 * @param[in] RgSchUeCb *ue
29123 * @param[in] U32 bo
29124 * @param[out] U32 *effBo
29125 * @param[in] RgSchDlHqProcCb *proc
29126 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29131 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6
29138 RgSchDlHqProcCb *proc,
29139 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29142 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29148 RgSchDlHqProcCb *proc;
29149 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29152 RgSchDlRbAlloc *allocInfo;
29153 RgSchCmnDlUe *ueDl;
29157 TRC2(rgSCHCmnDlAllocRetxRbTM6);
29160 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29161 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29163 if (ueDl->mimoInfo.forceTD)
29165 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
29166 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29170 allocInfo->dciFormat = TFU_DCI_FORMAT_1B;
29171 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29172 /* Fill precoding information for FORMAT 1B */
29173 /* The 4 least significant bits indicate the PMI.
29174 * The next bit (bit 4) carries the PMI confirmation.
29176 allocInfo->mimoAllocInfo.precIdxInfo |= ue->mimoInfo.puschFdbkVld << 4;
29177 allocInfo->mimoAllocInfo.precIdxInfo |= ueDl->mimoInfo.pmi;
29180 /* Get the Allocation in terms of RBs that are required for
29181 * this retx of TB1 */
29182 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, &proc->tbInfo[0],
29184 if (ret == RFAILED)
29186 /* Allocation couldn't be made for Retx */
29187 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
29190 /* Adding UE to allocInfo RETX Lst */
29191 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
29192 /* Fill UE alloc Info */
29193 allocInfo->rbsReq = numRb;
29194 allocInfo->dlSf = subFrm;
29200 * @brief This function determines the RBs and Bytes required for BO
29201 * transmission for UEs configured with TM 7.
29205 * Function: rgSCHCmnDlAllocTxRbTM7
29208 * Reference Parameter effBo is filled with allocated bytes.
29209 * Returns RFAILED if BO not satisfied at all.
29211 * Invoked by: rgSCHCmnDlAllocTxRb
29213 * @param[in] RgSchCellCb *cell
29214 * @param[in] RgSchDlSf *subFrm
29215 * @param[in] RgSchUeCb *ue
29216 * @param[in] U32 bo
29217 * @param[out] U32 *effBo
29218 * @param[in] RgSchDlHqProcCb *proc
29219 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29224 PRIVATE Void rgSCHCmnDlAllocTxRbTM7
29231 RgSchDlHqProcCb *proc,
29232 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29235 PRIVATE Void rgSCHCmnDlAllocTxRbTM7(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29241 RgSchDlHqProcCb *proc;
29242 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29245 TRC2(rgSCHCmnDlAllocTxRbTM7);
29246 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
29252 * @brief This function determines the RBs and Bytes required for BO
29253 * retransmission for UEs configured with TM 7.
29257 * Function: rgSCHCmnDlAllocRetxRbTM7
29260 * Reference Parameter effBo is filled with allocated bytes.
29261 * Returns RFAILED if BO not satisfied at all.
29263 * Invoked by: rgSCHCmnDlAllocRetxRb
29265 * @param[in] RgSchCellCb *cell
29266 * @param[in] RgSchDlSf *subFrm
29267 * @param[in] RgSchUeCb *ue
29268 * @param[in] U32 bo
29269 * @param[out] U32 *effBo
29270 * @param[in] RgSchDlHqProcCb *proc
29271 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29276 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7
29283 RgSchDlHqProcCb *proc,
29284 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29287 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29293 RgSchDlHqProcCb *proc;
29294 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29297 TRC2(rgSCHCmnDlAllocRetxRbTM7);
29298 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
29304 * @brief This function invokes the TM specific DL TX RB Allocation routine.
29308 * Function: rgSCHCmnDlAllocTxRb
29309 * Purpose: This function invokes the TM specific
29310 * DL TX RB Allocation routine.
29312 * Invoked by: Specific Schedulers
29314 * @param[in] RgSchCellCb *cell
29315 * @param[in] RgSchDlSf *subFrm
29316 * @param[in] RgSchUeCb *ue
29317 * @param[in] U32 bo
29318 * @param[out] U32 *effBo
29319 * @param[in] RgSchDlHqProcCb *proc
29320 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29325 PUBLIC S16 rgSCHCmnDlAllocTxRb
29332 RgSchDlHqProcCb *proc,
29333 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29336 PUBLIC S16 rgSCHCmnDlAllocTxRb(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29342 RgSchDlHqProcCb *proc;
29343 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29346 U32 newSchBits = 0;
29347 U32 prevSchBits = 0;
29348 RgSchDlRbAlloc *allocInfo;
29350 TRC2(rgSCHCmnDlAllocTxRb);
29352 if ( !RGSCH_TIMEINFO_SAME((cell->crntTime),(ue->dl.lstSchTime) ))
29354 ue->dl.aggTbBits = 0;
29358 /* Calculate total bits previously allocated */
29359 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29360 if (allocInfo->tbInfo[0].schdlngForTb)
29362 prevSchBits += allocInfo->tbInfo[0].bytesReq;
29364 if (allocInfo->tbInfo[1].schdlngForTb)
29366 prevSchBits += allocInfo->tbInfo[1].bytesReq;
29369 /* Call TM specific RB allocation routine */
29370 (dlAllocTxRbFunc[ue->mimoInfo.txMode - 1])(cell, subFrm, ue, bo, effBo, \
29371 proc, cellWdAllocInfo);
29375 /* Calculate total bits newly allocated */
29376 if (allocInfo->tbInfo[0].schdlngForTb)
29378 newSchBits += allocInfo->tbInfo[0].bytesReq;
29380 if (allocInfo->tbInfo[1].schdlngForTb)
29382 newSchBits += allocInfo->tbInfo[1].bytesReq;
29384 if (newSchBits > prevSchBits)
29386 ue->dl.aggTbBits += ((newSchBits - prevSchBits) * 8);
29387 RGSCHCPYTIMEINFO((cell->crntTime),(ue->dl.lstSchTime))
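/* Illustrative example (hypothetical values): if the TM-specific routine
 * grew the per-TB requirement from prevSchBits = 100 bytes to
 * newSchBits = 150 bytes, only the delta (50 bytes = 400 bits) is added
 * to ue->dl.aggTbBits, so repeated calls within the same TTI do not
 * double-count earlier allocations. */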
29394 /* DwPTS Scheduling Changes Start */
29397 * @brief Retransmit decision for TDD. Retx is avoided in below cases
29398 * 1) DL Sf -> Spl Sf
29399 * 2) DL SF -> DL SF 0
29403 * Function: rgSCHCmnRetxAvoidTdd
29404 * Purpose: Avoid allocating RETX for cases 1, 2
29406 * Invoked by: rgSCHCmnRetxAvoidTdd
29408 * @param[in] RgSchDlSf *curSf
29409 * @param[in] RgSchCellCb *cell
29410 * @param[in] RgSchDlHqProcCb *proc
29415 PUBLIC Bool rgSCHCmnRetxAvoidTdd
29419 RgSchDlHqProcCb *proc
29422 PUBLIC Bool rgSCHCmnRetxAvoidTdd(curSf, cell, proc)
29425 RgSchDlHqProcCb *proc;
29428 RgSchTddSfType txSfType = 0;
29430 TRC2(rgSCHCmnRetxAvoidTdd);
29432 /* Get the subframe type of the TB that will be retransmitted */
29433 if (proc->tbInfo[0].state == HQ_TB_NACKED)
29435 txSfType = proc->tbInfo[0].sfType;
29437 #ifdef XEON_SPECIFIC_CHANGES
29438 #ifndef XEON_TDD_SPCL
29439 /* Avoid re-transmission on a Normal SF when the corresponding TB was transmitted on a SPCL SF */
29440 if(txSfType <= RG_SCH_SPL_SF_DATA && curSf->sfType >= RG_SCH_DL_SF_0)
29447 if (proc->tbInfo[1].state == HQ_TB_NACKED)
29449 /* Select the TxSf with the highest num of possible REs
29450 * In ascending order -> 1) SPL SF 2) DL_SF_0 3) DL_SF */
29451 txSfType = RGSCH_MAX(txSfType, proc->tbInfo[1].sfType);
29453 #ifdef XEON_SPECIFIC_CHANGES
29454 #ifndef XEON_TDD_SPCL
29455 /* Avoid re-transmission on a Normal SF when the corresponding TB was transmitted on a SPCL SF */
29456 if(txSfType <= RG_SCH_SPL_SF_DATA && curSf->sfType >= RG_SCH_DL_SF_0)
29464 if (txSfType > curSf->sfType)
29475 /* DwPTS Scheduling Changes End */
29478 * @brief Avoid allocating RETX in case of collision
29479 * with reserved resources for BCH/PSS/SSS occasions.
29483 * Function: rgSCHCmnRetxAllocAvoid
29484 * Purpose: Avoid allocating RETX in case of collision
29485 * with reserved resources for BCH/PSS/SSS occasions
29487 * Invoked by: rgSCHCmnDlAllocRetxRb
29489 * @param[in] RgSchDlSf *subFrm
29490 * @param[in] RgSchUeCb *ue
29491 * @param[in] RgSchDlHqProcCb *proc
29496 PUBLIC Bool rgSCHCmnRetxAllocAvoid
29500 RgSchDlHqProcCb *proc
29503 PUBLIC Bool rgSCHCmnRetxAllocAvoid(subFrm, cell, proc)
29506 RgSchDlHqProcCb *proc;
29511 TRC2(rgSCHCmnRetxAllocAvoid);
29513 if (proc->tbInfo[0].state == HQ_TB_NACKED)
29515 reqRbs = proc->tbInfo[0].dlGrnt.numRb;
29519 reqRbs = proc->tbInfo[1].dlGrnt.numRb;
29521 /* Consider the dlGrnt.numRb of the Retransmitting proc->tbInfo
29522 * and current available RBs to determine if this RETX TB
29523 * will collide with the BCH/PSS/SSS occasion */
29524 if (subFrm->sfNum % 5 == 0)
29526 if ((subFrm->bwAssigned < cell->pbchRbEnd) &&
29527 (((subFrm->bwAssigned + reqRbs) - cell->pbchRbStart) > 0))
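/* Illustrative example (hypothetical values): with pbchRbStart = 47,
 * pbchRbEnd = 53, bwAssigned = 40 and reqRbs = 10, the RETX would span
 * RBs 40..49 and overlap the reserved PBCH/PSS/SSS region, so the
 * allocation is avoided for this subframe. */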
29539 * @brief This function invokes the TM specific DL RETX RB Allocation routine.
29543 * Function: rgSCHCmnDlAllocRetxRb
29544 * Purpose: This function invokes the TM specific
29545 * DL RETX RB Allocation routine.
29547 * Invoked by: Specific Schedulers
29549 * @param[in] RgSchCellCb *cell
29550 * @param[in] RgSchDlSf *subFrm
29551 * @param[in] RgSchUeCb *ue
29552 * @param[in] U32 bo
29553 * @param[out] U32 *effBo
29554 * @param[in] RgSchDlHqProcCb *proc
29555 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29560 PUBLIC S16 rgSCHCmnDlAllocRetxRb
29567 RgSchDlHqProcCb *proc,
29568 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29571 PUBLIC S16 rgSCHCmnDlAllocRetxRb(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29577 RgSchDlHqProcCb *proc;
29578 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29581 U32 newSchBits = 0;
29582 RgSchDlRbAlloc *allocInfo;
29584 TRC2(rgSCHCmnDlAllocRetxRb);
29586 if ( !RGSCH_TIMEINFO_SAME((cell->crntTime),(ue->dl.lstSchTime) ))
29588 ue->dl.aggTbBits = 0;
29592 /* Check for DL BW exhaustion */
29593 if (subFrm->bw <= subFrm->bwAssigned)
29597 /* Call TM specific RB allocation routine */
29598 (dlAllocRetxRbFunc[ue->mimoInfo.txMode - 1])(cell, subFrm, ue, bo, effBo, \
29599 proc, cellWdAllocInfo);
29603 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29604 /* Calculate totals bits newly allocated */
29605 if (allocInfo->tbInfo[0].schdlngForTb)
29607 newSchBits += allocInfo->tbInfo[0].bytesReq;
29609 if (allocInfo->tbInfo[1].schdlngForTb)
29611 newSchBits += allocInfo->tbInfo[1].bytesReq;
29613 ue->dl.aggTbBits += (newSchBits * 8);
29614 RGSCHCPYTIMEINFO((cell->crntTime),(ue->dl.lstSchTime))
29622 * @brief This function determines the RBs and Bytes required for
29623 * Transmission on 1 CW.
29627 * Function: rgSCHCmnDlAlloc1CwTxRb
29628 * Purpose: This function determines the RBs and Bytes required
29629 * for Transmission of DL SVC BO on 1 CW.
29630 * Also, takes care of SVC by SVC allocation by tracking
29631 * previous SVC allocations.
29632 * Returns RFAILED if BO not satisfied at all.
29634 * Invoked by: DL UE Allocation
29636 * @param[in] RgSchCellCb *cell
29637 * @param[in] RgSchDlSf *subFrm
29638 * @param[in] RgSchUeCb *ue
29639 * @param[in] RgSchDlHqTbCb *tbInfo
29640 * @param[in] U32 bo
29641 * @param[out] U8 *numRb
29642 * @param[out] U32 *effBo
29647 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb
29652 RgSchDlHqTbCb *tbInfo,
29658 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, tbInfo, bo, numRb, effBo)
29662 RgSchDlHqTbCb *tbInfo;
29671 RgSchCmnDlUe *ueDl;
29672 RgSchDlRbAlloc *allocInfo;
29675 /* Correcting wrap around issue.
29676 * This change has been done at multiple places in this function.*/
29678 TRC2(rgSCHCmnDlAlloc1CwTxRb);
29681 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29682 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29683 oldReq = ueDl->outStndAlloc;
29686 //TODO_SID: Currently setting max TB size with respect to 5GTF TM3
29687 iTbs = ue->ue5gtfCb.mcs;
29688 ueDl->maxTbSz = MAX_5GTF_TB_SIZE * ue->ue5gtfCb.rank;
29689 ueDl->maxRb = MAX_5GTF_PRBS;
29691 ueDl->outStndAlloc += bo;
29692 /* consider Cumulative amount of this BO and bytes so far allocated */
29693 bo = RGSCH_MIN(ueDl->outStndAlloc, ueDl->maxTbSz/8);
29694 /* Get the number of REs needed for this bo. */
29695 //noRes = ((bo * 8 * 1024) / eff);
29697 /* Get the number of RBs needed for this transmission */
29698 /* Number of RBs = No of REs / No of REs per RB */
29699 //tempNumRb = RGSCH_CEIL(noRes, cellDl->noResPerRb[cfi]);
29700 tempNumRb = MAX_5GTF_PRBS;
29701 tbSz = RGSCH_MIN(bo, (rgSch5gtfTbSzTbl[iTbs]/8) * ue->ue5gtfCb.rank);
29703 /* DwPts Scheduling Changes End */
29704 *effBo = RGSCH_MIN(tbSz - oldReq, reqBytes);
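/* Illustrative example (hypothetical values): if 200 bytes were already
 * outstanding for this UE (oldReq) and the cumulative TB size now works
 * out to 500 bytes, the effective new allocation reported for this
 * service is min(300, reqBytes). */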
29707 //RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, imcs);
29712 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tbSz, \
29713 iTbs, imcs, tbInfo, ue->ue5gtfCb.rank);
29714 *numRb = (U8) tempNumRb;
29716 /* Update the subframe Allocated BW field */
29717 subFrm->bwAssigned = subFrm->bwAssigned + tempNumRb - allocInfo->rbsReq;
29724 * @brief This function is invoked in the event of any TB's allocation
29725 * being underutilized by the specific scheduler. Here we reduce iMcs
29726 * to increase redundancy and hence increase reception quality at UE.
29730 * Function: rgSCHCmnRdcImcsTxTb
29731 * Purpose: This function shall reduce the iMcs in accordance with
29732 * the total consumed bytes by the UE at allocation
29735 * Invoked by: UE DL Allocation finalization routine
29736 * of specific scheduler.
29738 * @param[in] RgSchDlRbAlloc *allocInfo
29739 * @param[in] U8 tbInfoIdx
29740 * @param[in] U32 cnsmdBytes
29745 PUBLIC Void rgSCHCmnRdcImcsTxTb
29747 RgSchDlRbAlloc *allocInfo,
29752 PUBLIC Void rgSCHCmnRdcImcsTxTb(allocInfo, tbInfoIdx, cnsmdBytes)
29753 RgSchDlRbAlloc *allocInfo;
29759 /*The below functionality is not needed.*/
29764 TRC2(rgSCHCmnRdcImcsTxTb);
29766 iTbs = allocInfo->tbInfo[tbInfoIdx].iTbs;
29767 noLyr = allocInfo->tbInfo[tbInfoIdx].noLyr;
29768 numRb = allocInfo->rbsAlloc;
29771 if ((rgTbSzTbl[noLyr-1][iTbs][numRb-1]/8) == cnsmdBytes)
29776 /* Get iTbs as suitable for the consumed bytes */
29777 while((rgTbSzTbl[noLyr-1][iTbs][numRb-1]/8) > cnsmdBytes)
29781 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, allocInfo->tbInfo[tbInfoIdx].\
29782 tbCb->dlGrnt.iMcs);
29788 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, allocInfo->tbInfo[tbInfoIdx].tbCb->dlGrnt.iMcs);
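/* Illustrative example (hypothetical values): if the allocated grant
 * could carry 1000 bytes at the current iTbs but the specific scheduler
 * consumed only 600 bytes, the loop above steps iTbs down until
 * rgTbSzTbl[noLyr-1][iTbs][numRb-1]/8 no longer exceeds 600, and the
 * reduced iTbs is then mapped back to iMcs for the grant. */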
29795 * @brief This function determines the RBs and Bytes required for
29796 * Transmission on 2 CWs.
29800 * Function: rgSCHCmnDlAlloc2CwTxRb
29801 * Purpose: This function determines the RBs and Bytes required
29802 * for Transmission of DL SVC BO on 2 CWs.
29803 * Also, takes care of SVC by SVC allocation by tracking
29804 * previous SVC allocations.
29805 * Returns RFAILED if BO not satisfied at all.
29807 * Invoked by: TM3 and TM4 DL UE Allocation
29809 * @param[in] RgSchCellCb *cell
29810 * @param[in] RgSchDlSf *subFrm
29811 * @param[in] RgSchUeCb *ue
29812 * @param[in] RgSchDlHqProcCb *proc
29813 * @param[in] U32 bo
29814 * @param[out] U8 *numRb
29815 * @param[out] U32 *effBo
29820 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb
29825 RgSchDlHqProcCb *proc,
29831 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, numRbRef, effBo)
29835 RgSchDlHqProcCb *proc;
29847 RgSchCmnDlCell *cellDl;
29848 RgSchCmnDlUe *ueDl;
29849 RgSchDlRbAlloc *allocInfo;
29852 /* Fix: MUE_PERTTI_DL */
29854 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
29855 U8 cfi = cellSch->dl.currCfi;
29862 TRC2(rgSCHCmnDlAlloc2CwTxRb);
29865 cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
29866 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29867 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29868 oldReq = ueDl->outStndAlloc;
29871 if (ueDl->maxTbBits > ue->dl.aggTbBits)
29873 availBits = ueDl->maxTbBits - ue->dl.aggTbBits;
29875 /* check if we can further allocate to this UE */
29876 if ((ue->dl.aggTbBits >= ueDl->maxTbBits) ||
29877 (allocInfo->tbInfo[0].bytesReq >= ueDl->maxTbSz/8) ||
29878 (allocInfo->tbInfo[1].bytesReq >= ueDl->maxTbSz/8) ||
29879 (allocInfo->rbsReq >= ueDl->maxRb))
29881 RLOG_ARG0(L_DEBUG,DBG_CELLID,cell->cellId,
29882 "rgSCHCmnDlAllocRb(): UEs max allocation exceed");
29886 noLyr1 = ueDl->mimoInfo.cwInfo[0].noLyr;
29887 noLyr2 = ueDl->mimoInfo.cwInfo[1].noLyr;
29889 /* If there is no CFI change, continue to use the BLER based
29891 if (ueDl->lastCfi == cfi)
29893 iTbs1 = ueDl->mimoInfo.cwInfo[0].iTbs[noLyr1 - 1];
29894 iTbs2 = ueDl->mimoInfo.cwInfo[1].iTbs[noLyr2 - 1];
29898 U8 cqi = ueDl->mimoInfo.cwInfo[0].cqi;
29900 iTbs1 = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, cqi, cfi, 0, noLyr1);
29902 iTbs1 = (U8) rgSchCmnFetchItbs(cell, ueDl, cqi, cfi, 0, noLyr1);
29905 cqi = ueDl->mimoInfo.cwInfo[1].cqi;
29907 iTbs2 = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, cqi, cfi, 1, noLyr2);
29909 iTbs2 = (U8) rgSchCmnFetchItbs(cell, ueDl, cqi, cfi, 1, noLyr2);
29913 /*ccpu00131191 and ccpu00131317 - Fix for RRC Reconfig failure
29914 * issue for VoLTE call */
29915 //if ((proc->hasDcch) || (TRUE == rgSCHLaaSCellEnabled(cell)))
29935 else if(!cellSch->dl.isDlFreqSel)
29938 /* for Tdd reduce iTbs only for SF0. SF5 contains only
29939 * SSS and can be ignored */
29940 if (subFrm->sfNum == 0)
29942 (iTbs1 > 1)? (iTbs1 -= 1) : (iTbs1 = 0);
29943 (iTbs2 > 1)? (iTbs2 -= 1) : (iTbs2 = 0);
29945 /* For SF 3 and 8, CRC failures are being seen in DL.
29946 Need to do a proper fix after the reply from
29948 #ifdef CA_PHY_BRDCM_61765
29949 if ((subFrm->sfNum == 3) || (subFrm->sfNum == 8))
29951 (iTbs1 > 2)? (iTbs1 -= 2) : (iTbs1 = 0);
29952 (iTbs2 > 2)? (iTbs2 -= 2) : (iTbs2 = 0);
29960 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
29962 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
29966 eff1 = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[noLyr1 - 1][cfi]))[iTbs1];
29967 eff2 = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[noLyr2 - 1][cfi]))[iTbs2];
29970 bo = RGSCH_MIN(bo,availBits/8);
29971 ueDl->outStndAlloc += bo;
29972 /* consider Cumulative amount of this BO and bytes so far allocated */
29973 bo = RGSCH_MIN(ueDl->outStndAlloc, ueDl->maxTbBits/8);
29974 bo = RGSCH_MIN(RGSCH_MAX(RGSCH_CMN_MIN_GRNT_HDR, (bo*eff1)/(eff1+eff2)),
29976 RGSCH_MIN(RGSCH_MAX(RGSCH_CMN_MIN_GRNT_HDR, (bo*eff2)/(eff1+eff2)),
29977 (ueDl->maxTbSz)/8) +
29978 1; /* Add 1 to adjust the truncation at weighted averaging */
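/* Illustrative example (hypothetical values): with bo = 1000 bytes,
 * eff1 = 600 and eff2 = 400, the CW1 share is roughly
 * (1000*600)/(600+400) = 600 bytes and the CW2 share 400 bytes; each
 * share is capped at maxTbSz/8 and floored at RGSCH_CMN_MIN_GRNT_HDR,
 * and bo becomes their sum plus 1 byte to compensate for truncation. */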
29979 /* Get the number of REs needed for this bo. */
29980 noRes = ((bo * 8 * 1024) / (eff1 + eff2));
29982 /* Get the number of RBs needed for this transmission */
29983 /* Number of RBs = No of REs / No of REs per RB */
29984 numRb = RGSCH_CEIL(noRes, cellDl->noResPerRb[cfi]);
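/* Illustrative example (hypothetical values, assuming eff1 and eff2 are
 * per-RE efficiencies scaled by 1024, consistent with the *1024 factor
 * above): with bo = 1000 bytes and eff1 + eff2 = 1000, noRes =
 * (1000*8*1024)/1000 = 8192 REs; with noResPerRb[cfi] = 120 REs per RB
 * this rounds up to 69 RBs, subject to the maxRb cap below. */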
29985 /* Cannot exceed the maximum number of RBs per UE */
29986 if (numRb > ueDl->maxRb)
29988 numRb = ueDl->maxRb;
29993 if(RFAILED == rgSCHLaaCmn2CwAdjustPrb(allocInfo, boTmp, &numRb, ueDl, noLyr1, noLyr2, iTbs1, iTbs2))
29996 while ((numRb <= ueDl->maxRb) &&
29997 (rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1] <= ueDl->maxTbSz) &&
29998 (rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1] <= ueDl->maxTbSz) &&
29999 ((rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1]/8 +
30000 rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1]/8) <= bo))
30006 availBw = subFrm->bw - subFrm->bwAssigned;
30007 /* Cannot exceed the total number of RBs in the cell */
30008 if ((S16)(numRb - allocInfo->rbsReq) > availBw)
30010 numRb = availBw + allocInfo->rbsReq;
30012 tb1Sz = rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1]/8;
30013 tb2Sz = rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1]/8;
30014 /* DwPts Scheduling Changes Start */
30016 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
30018 /* Max Rb for Special Sf is approximated as 4/3 of maxRb */
30019 rgSCHCmnCalcDwPtsTbSz2Cw(cell, bo, (U8*)&numRb, ueDl->maxRb*4/3,
30020 &iTbs1, &iTbs2, noLyr1,
30021 noLyr2, &tb1Sz, &tb2Sz, cfi);
30022 /* Check for available Bw */
30023 if ((S16)numRb - allocInfo->rbsReq > availBw)
30025 numRb = availBw + allocInfo->rbsReq;
30026 tb1Sz = rgTbSzTbl[noLyr1-1][iTbs1][RGSCH_MAX(numRb*3/4,1)-1]/8;
30027 tb2Sz = rgTbSzTbl[noLyr2-1][iTbs2][RGSCH_MAX(numRb*3/4,1)-1]/8;
30031 /* DwPts Scheduling Changes End */
30032 /* Update the subframe Allocated BW field */
30033 subFrm->bwAssigned = subFrm->bwAssigned + numRb - \
30036 *effBo = RGSCH_MIN((tb1Sz + tb2Sz) - oldReq, reqBytes);
30039 if (ROK != rgSCHLaaCmn2TBPrbCheck(allocInfo, tb1Sz, tb2Sz, boTmp, effBo, iTbs1, iTbs2, numRb, proc))
30045 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs1, imcs1);
30046 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs2, imcs2);
30047 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tb1Sz, \
30048 iTbs1, imcs1, &proc->tbInfo[0], noLyr1);
30049 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], tb2Sz, \
30050 iTbs2, imcs2, &proc->tbInfo[1], noLyr2);
30051 *numRbRef = (U8)numRb;
30059 * @brief This function determines the RBs and Bytes required for
30060 * Transmission & Retransmission on 2 CWs.
30064 * Function: rgSCHCmnDlAlloc2CwTxRetxRb
30065 * Purpose: This function determines the RBs and Bytes required
30066 * for Transmission & Retransmission on 2 CWs. Allocate
30067 * RETX TB on a better CW and restrict new TX TB by
30069 * Returns RFAILED if BO not satisfied at all.
30071 * Invoked by: TM3 and TM4 DL UE Allocation
30073 * @param[in] RgSchCellCb *cell
30074 * @param[in] RgSchDlSf *subFrm
30075 * @param[in] RgSchUeCb *ue
30076 * @param[in] RgSchDlHqTbCb *reTxTb
30077 * @param[in] RgSchDlHqTbCb *txTb
30078 * @param[out] U8 *numRb
30079 * @param[out] U32 *effBo
30084 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb
30089 RgSchDlHqTbCb *reTxTb,
30090 RgSchDlHqTbCb *txTb,
30095 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, reTxTb, txTb, numRb,\
30100 RgSchDlHqTbCb *reTxTb;
30101 RgSchDlHqTbCb *txTb;
30106 RgSchCmnDlUe *ueDl;
30107 RgSchDlRbAlloc *allocInfo;
30111 RgSchCmnDlUeCwInfo *otherCw;
30113 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
30114 U8 cfi = cellDl->currCfi;
30117 TRC2(rgSCHCmnDlAlloc2CwTxRetxRb);
30119 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
30120 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30121 otherCw = &ueDl->mimoInfo.cwInfo[!(ueDl->mimoInfo.btrCwIdx)];
30124 /* Fix for ccpu00123919: In case of RETX TB scheduling avoiding recomputation of RB
30125 * and Tbs. Set all parameters same as Init TX except RV(only for NACKED) and
30127 availBw = subFrm->bw - subFrm->bwAssigned;
30128 *numRb = reTxTb->dlGrnt.numRb;
30130 #ifdef XEON_TDD_SPCL
30131 *numRb = (reTxTb->initTxNumRbs);
30132 if(reTxTb->sfType == RG_SCH_SPL_SF_DATA && subFrm->sfType != RG_SCH_SPL_SF_DATA)
30134 *numRb = (reTxTb->initTxNumRbs*3/4);
30138 RLOG1(L_ERROR," Number of RBs [%d] is less than or equal to 3",*numRb);
30144 if ((S16)*numRb > availBw)
30148 /* Update the subframe Allocated BW field */
30149 subFrm->bwAssigned += *numRb;
30150 noLyr2 = otherCw->noLyr;
30151 RG_SCH_CMN_GET_MCS_FOR_RETX(reTxTb, imcs1);
30153 /* If there is no CFI change, continue to use the BLER based
30155 if (ueDl->lastCfi == cfi)
30157 iTbs = otherCw->iTbs[noLyr2-1];
30162 iTbs = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, otherCw->cqi, cfi,
30163 !(ueDl->mimoInfo.btrCwIdx), noLyr2);
30165 iTbs = (U8) rgSchCmnFetchItbs(cell, ueDl, otherCw->cqi, cfi,
30166 !(ueDl->mimoInfo.btrCwIdx), noLyr2);
30169 tb2Sz = rgTbSzTbl[noLyr2-1][iTbs][*numRb-1]/8;
30170 /* DwPts Scheduling Changes Start */
30173 /* DwPts Scheduling Changes End */
30174 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, imcs2);
30176 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], reTxTb->tbSz, \
30177 0, imcs1, reTxTb, reTxTb->numLyrs);
30179 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], tb2Sz, \
30180 iTbs, imcs2, txTb, noLyr2);
30182 *effBo = reTxTb->tbSz + tb2Sz;
30189 * @brief This function determines the RBs and Bytes required for BO
30190 * Retransmission on 2 CWs.
30194 * Function: rgSCHCmnDlAlloc2CwRetxRb
30195 * Purpose: This function determines the RBs and Bytes required
30196 * for BO Retransmission on 2 CWs. Allocate larger TB
30197 * on a better CW and check if the smaller TB can be
30198 * accommodated on the other CW.
30199 * Returns RFAILED if BO not satisfied at all.
30201 * Invoked by: Common Scheduler
30203 * @param[in] RgSchCellCb *cell
30204 * @param[in] RgSchDlSf *subFrm
30205 * @param[in] RgSchUeCb *ue
30206 * @param[in] RgSchDlHqProcCb *proc
30207 * @param[out] U8 *numRb
30208 * @param[out] Bool *swpFlg
30209 * @param[out] U32 *effBo
30214 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb
30219 RgSchDlHqProcCb *proc,
30225 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc,\
30226 numRb, swpFlg, effBo)
30230 RgSchDlHqProcCb *proc;
30236 RgSchDlRbAlloc *allocInfo;
30239 RgSchDlHqTbCb *lrgTbInfo, *othrTbInfo;
30241 TRC2(rgSCHCmnDlAlloc2CwRetxRb);
30243 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30246 /* Fix for ccpu00123919: In case of RETX TB scheduling avoiding recomputation of RB
30247 * and Tbs. Set all parameters same as Init TX except RV(only for NACKED) and
30249 lrgTbInfo = &proc->tbInfo[0];
30250 othrTbInfo = &proc->tbInfo[1];
30251 *numRb = lrgTbInfo->dlGrnt.numRb;
30252 #ifdef XEON_TDD_SPCL
30253 if((lrgTbInfo->sfType == RG_SCH_SPL_SF_DATA || othrTbInfo->sfType == RG_SCH_SPL_SF_DATA))
30255 if(lrgTbInfo->sfType == RG_SCH_SPL_SF_DATA)
30257 *numRb = (lrgTbInfo->initTxNumRbs);
30261 *numRb = (othrTbInfo->initTxNumRbs);
30264 if(subFrm->sfType != RG_SCH_SPL_SF_DATA)
30266 *numRb = (*numRb)*3/4;
30271 RLOG1(L_ERROR," Number of RBs [%d] is less than or equal to 3",*numRb);
30276 if ((S16)*numRb > (S16)(subFrm->bw - subFrm->bwAssigned))
30280 /* Update the subframe Allocated BW field */
30281 subFrm->bwAssigned += *numRb;
30282 RG_SCH_CMN_GET_MCS_FOR_RETX(lrgTbInfo, imcs1);
30283 RG_SCH_CMN_GET_MCS_FOR_RETX(othrTbInfo, imcs2);
30284 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], lrgTbInfo->tbSz, \
30285 0, imcs1, lrgTbInfo, lrgTbInfo->numLyrs);
30286 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], othrTbInfo->tbSz, \
30287 0, imcs2, othrTbInfo, othrTbInfo->numLyrs);
30288 *effBo = lrgTbInfo->tbSz + othrTbInfo->tbSz;
30297 * @brief This function determines the RBs and Bytes required for BO
30298 * Retransmission on 1 CW.
30302 * Function: rgSCHCmnDlAlloc1CwRetxRb
30303 * Purpose: This function determines the RBs and Bytes required
30304 * for BO Retransmission on 1 CW, the first CW.
30305 * Returns RFAILED if BO not satisfied at all.
30307 * Invoked by: Common Scheduler
30309 * @param[in] RgSchCellCb *cell
30310 * @param[in] RgSchDlSf *subFrm
30311 * @param[in] RgSchUeCb *ue
30312 * @param[in] RgSchDlHqTbCb *tbInfo
30313 * @param[in] U8 noLyr
30314 * @param[out] U8 *numRb
30315 * @param[out] U32 *effBo
30320 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb
30325 RgSchDlHqTbCb *tbInfo,
30331 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, tbInfo, noLyr,\
30336 RgSchDlHqTbCb *tbInfo;
30342 RgSchDlRbAlloc *allocInfo;
30345 TRC2(rgSCHCmnDlAlloc1CwRetxRb);
30347 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30350 /* Fix for ccpu00123919: In case of RETX TB scheduling avoiding recomputation of RB
30351 * and Tbs. Set all parameters same as Init TX except RV(only for NACKED) and
30353 *numRb = tbInfo->dlGrnt.numRb;
30354 if ((S16)*numRb > (S16)(subFrm->bw - subFrm->bwAssigned))
30358 /* Update the subframe Allocated BW field */
30359 subFrm->bwAssigned += *numRb;
30360 imcs = tbInfo->dlGrnt.iMcs;
30361 allocInfo->dciFormat = tbInfo->dlGrnt.dciFormat;
30362 /* Fix: For a RETX TB the iTbs is irrelevant, hence setting 0 */
30363 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tbInfo->tbSz, \
30364 0, imcs, tbInfo, tbInfo->numLyrs);
30365 *effBo = tbInfo->tbSz;
30373 * @brief This function is called to handle Release PDCCH feedback for SPS UE
30377 * Function: rgSCHCmnDlRelPdcchFbk
30378 * Purpose: Invokes SPS module to handle release PDCCH feedback
30382 * @param[in] RgSchCellCb *cell
30383 * @param[in] RgSchUeCb *ue
30384 * @param[in] Bool isAck
30389 PUBLIC Void rgSCHCmnDlRelPdcchFbk
30396 PUBLIC Void rgSCHCmnDlRelPdcchFbk(cell, ue, isAck)
30403 TRC2(rgSCHCmnDlRelPdcchFbk);
30404 rgSCHCmnSpsDlRelPdcchFbk(cell, ue, isAck);
30411 * @brief This function is invoked to handle Ack processing for a HARQ proc.
30415 * Function: rgSCHCmnDlProcAck
30416 * Purpose: DTX processing for HARQ proc
30420 * @param[in] RgSchCellCb *cell
30421 * @param[in] RgSchDlHqProcCb *hqP
30426 PUBLIC Void rgSCHCmnDlProcAck
30429 RgSchDlHqProcCb *hqP
30432 PUBLIC Void rgSCHCmnDlProcAck(cell, hqP)
30434 RgSchDlHqProcCb *hqP;
30438 TRC2(rgSCHCmnDlProcAck);
30440 if (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP))
30442 /* Invoke SPS module if SPS service was scheduled for this HARQ proc */
30443 rgSCHCmnSpsDlProcAck(cell, hqP);
30447 #ifdef RGSCH_SPS_STATS
30448 extern U32 rgSchStatCrntiCeRcvCnt;
30451 * @brief This function is invoked to handle CRNTI CE reception for an UE
30455 * Function: rgSCHCmnHdlCrntiCE
30456 * Purpose: Handle CRNTI CE reception
30460 * @param[in] RgSchCellCb *cell
30461 * @param[in] RgSchDlHqProcCb *hqP
30466 PUBLIC Void rgSCHCmnHdlCrntiCE
30472 PUBLIC Void rgSCHCmnHdlCrntiCE(cell, ue)
30478 TRC2(rgSCHCmnHdlCrntiCE);
30479 #ifdef RGSCH_SPS_STATS
30480 rgSchStatCrntiCeRcvCnt++;
30483 /* When UL sync is lost due to TA timer expiry, the UE is moved to the
30484 PDCCH order inactivity list. But when a CRNTI CE is received in msg3 from the UE,
30485 we were not moving the UE into the active state, due to which RRC Reconfiguration is
30487 So here we move the UE to the active list whenever we receive the CRNTI CE and
30489 /* CR ccpu00144525 */
30490 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
30492 /* Activate this UE if it was inactive */
30493 RG_SCH_CMN_DL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
30494 RG_SCH_CMN_UL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
30497 /* Handling is same as reception of UE RESET for both DL and UL */
30498 if (ue->dl.dlSpsCfg.isDlSpsEnabled)
30500 rgSCHCmnSpsDlUeReset(cell, ue);
30502 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30504 rgSCHCmnSpsUlUeReset(cell, ue);
30512 * @brief This function is called to handle relInd from MAC for a UE
30516 * Function: rgSCHCmnUlSpsRelInd
30517 * Purpose: Invokes SPS module to handle UL SPS release for a UE
30519 * Invoked by: SCH_UTL
30521 * @param[in] RgSchCellCb *cell
30522 * @param[in] RgSchUeCb *ue
30523 * @param[in] Bool isExplRel
30528 PUBLIC Void rgSCHCmnUlSpsRelInd
30535 PUBLIC Void rgSCHCmnUlSpsRelInd(cell, ue, isExplRel)
30542 TRC2(rgSCHCmnUlSpsRelInd);
30543 rgSCHCmnSpsUlProcRelInd(cell, ue, isExplRel);
30546 } /* end of rgSCHCmnUlSpsRelInd */
30549 * @brief This function is called to handle SPS Activate Ind from MAC for a UE
30553 * Function: rgSCHCmnUlSpsActInd
30554 * Purpose: Invokes SPS module to handle UL SPS activate for a UE
30556 * Invoked by: SCH_UTL
30558 * @param[in] RgSchCellCb *cell
30559 * @param[in] RgSchUeCb *ue
30564 PUBLIC Void rgSCHCmnUlSpsActInd
30571 PUBLIC Void rgSCHCmnUlSpsActInd(cell, ue,spsSduSize)
30578 TRC2(rgSCHCmnUlSpsActInd);
30580 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30582 rgSCHCmnSpsUlProcActInd(cell, ue,spsSduSize);
30586 } /* end of rgSCHCmnUlSpsActInd */
30589 * @brief This function is called to handle CRC in UL for UEs
30590 * undergoing SPS release
30594 * Function: rgSCHCmnUlCrcInd
30595 * Purpose: Invokes SPS module to handle CRC in UL for SPS UE
30597 * Invoked by: SCH_UTL
30599 * @param[in] RgSchCellCb *cell
30600 * @param[in] RgSchUeCb *ue
30601 * @param[in] CmLteTimingInfo crcTime
30606 PUBLIC Void rgSCHCmnUlCrcInd
30610 CmLteTimingInfo crcTime
30613 PUBLIC Void rgSCHCmnUlCrcInd(cell, ue, crcTime)
30616 CmLteTimingInfo crcTime;
30620 TRC2(rgSCHCmnUlCrcInd);
30621 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30623 rgSCHCmnSpsUlProcCrcInd(cell, ue, crcTime);
30627 } /* end of rgSCHCmnUlCrcInd */
30630 * @brief This function is called to handle CRC failure in UL
30634 * Function: rgSCHCmnUlCrcFailInd
30635 * Purpose: Invokes SPS module to handle CRC failure in UL for SPS UE
30637 * Invoked by: SCH_UTL
30639 * @param[in] RgSchCellCb *cell
30640 * @param[in] RgSchUeCb *ue
30641 * @param[in] CmLteTimingInfo crcTime
30646 PUBLIC Void rgSCHCmnUlCrcFailInd
30650 CmLteTimingInfo crcTime
30653 PUBLIC Void rgSCHCmnUlCrcFailInd(cell, ue, crcTime)
30656 CmLteTimingInfo crcTime;
30660 TRC2(rgSCHCmnUlCrcFailInd);
30661 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30663 rgSCHCmnSpsUlProcDtxInd(cell, ue, crcTime);
30667 } /* end of rgSCHCmnUlCrcFailInd */
30669 #endif /* LTEMAC_SPS */
30672 * @brief BCH, BCCH, PCCH Downlink Scheduling Handler.
30676 * Function: rgSCHCmnDlBcchPcchAlloc
30677 * Purpose: This function calls common scheduler APIs to
30678 * schedule for BCCH/PCCH.
30679 * It then invokes Allocator for actual RB
30680 * allocations. It processes on the actual resources allocated
30681 * against requested to the allocator module.
30683 * Invoked by: Common Scheduler
30685 * @param[in] RgSchCellCb *cell
30689 PRIVATE Void rgSCHCmnDlBcchPcchAlloc
30694 PRIVATE Void rgSCHCmnDlBcchPcchAlloc(cell)
30699 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_SF_ALLOC_SIZE;
30701 #ifdef LTEMAC_HDFDD
30702 U8 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
30704 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
30707 RgInfSfAlloc *nextsfAlloc = &(cell->sfAllocArr[nextSfIdx]);
30708 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
30709 RgSchCmnDlRbAllocInfo *allocInfo = &cellSch->allocInfo;
30711 TRC2(rgSCHCmnDlBcchPcchAlloc);
30714 /*Reset the bitmask for BCCH/PCCH*/
30715 rgSCHUtlResetSfAlloc(nextsfAlloc,TRUE,FALSE);
30716 #ifndef DISABLE_MIB_SIB /* Not sending MIB and SIB to CL */
30718 rgSCHChkNUpdSiCfg(cell);
30719 rgSCHSelectSi(cell);
30722 /*Perform the scheduling for BCCH,PCCH*/
30723 rgSCHCmnDlBcchPcch(cell, allocInfo, nextsfAlloc);
30725 /* Call common allocator for RB Allocation */
30726 rgSCHBcchPcchDlRbAlloc(cell, allocInfo);
30728 /* Finalize the allocations: requested versus allocated */
30729 rgSCHCmnDlBcchPcchFnlz(cell, allocInfo);
30730 #endif /* DISABLE_MIB_SIB */
30735 * @brief Handles RB allocation for BCCH/PCCH for downlink.
30739 * Function : rgSCHBcchPcchDlRbAlloc
30741 * Invoking Module Processing:
30742 * - This function is invoked for DL RB allocation of BCCH/PCCH
30744 * Processing Steps:
30745 * - If cell is frequency selective,
30746 * - Call rgSCHDlfsBcchPcchAllocRb().
30748 * - Do the processing
30750 * @param[in] RgSchCellCb *cell
30751 * @param[in] RgSchDlRbAllocInfo *allocInfo
30756 PRIVATE Void rgSCHBcchPcchDlRbAlloc
30759 RgSchCmnDlRbAllocInfo *allocInfo
30762 PRIVATE Void rgSCHBcchPcchDlRbAlloc(cell, allocInfo)
30764 RgSchCmnDlRbAllocInfo *allocInfo;
30767 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
30769 TRC2(rgSCHBcchPcchDlRbAlloc);
30772 if (cellSch->dl.isDlFreqSel)
30774 cellSch->apisDlfs->rgSCHDlfsBcchPcchAllocRb(cell, allocInfo);
30778 rgSCHCmnNonDlfsBcchPcchRbAlloc(cell, allocInfo);
30785 * @brief Handles RB allocation for BCCH,PCCH for frequency
30786 * non-selective cell.
30790 * Function : rgSCHCmnNonDlfsBcchPcchRbAlloc
30792 * Invoking Module Processing:
30793 * - SCH shall invoke this if downlink frequency selective is disabled for
30794 * the cell for RB allocation.
30795 * - MAX C/I/PFS/RR shall provide the requiredBytes, required RBs
30796 * estimate and subframe for each allocation to be made to SCH.
30798 * Processing Steps:
30799 * - Allocate sequentially for BCCH,PCCH common channels.
30801 * @param[in] RgSchCellCb *cell
30802 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
30807 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc
30810 RgSchCmnDlRbAllocInfo *allocInfo
30813 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc(cell, allocInfo)
30815 RgSchCmnDlRbAllocInfo *allocInfo;
30818 RgSchDlRbAlloc *reqAllocInfo;
30820 TRC2(rgSCHCmnNonDlfsBcchPcchRbAlloc);
30823 /* Allocate for PCCH */
30824 reqAllocInfo = &(allocInfo->pcchAlloc);
30825 if (reqAllocInfo->rbsReq)
30827 rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo);
30829 /* Allocate for BCCH on DLSCH */
30830 reqAllocInfo = &(allocInfo->bcchAlloc);
30831 if (reqAllocInfo->rbsReq)
30833 rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo);
30841 * @brief This function implements the handling to check and
30842 * update the SI cfg at the start of the modification period.
30846 * Function: rgSCHChkNUpdSiCfg
30847 * Purpose: This function implements handling for update of SI Cfg
30848 * at the start of modification period.
30850 * Invoked by: Scheduler
30852 * @param[in] RgSchCellCb* cell
30858 PRIVATE Void rgSCHChkNUpdSiCfg
30863 PRIVATE Void rgSCHChkNUpdSiCfg(cell)
30867 CmLteTimingInfo pdSchTmInfo;
30869 TRC2(rgSCHChkNUpdSiCfg);
30872 pdSchTmInfo = cell->crntTime;
30873 #ifdef LTEMAC_HDFDD
30874 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
30875 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
30876 RGSCH_INCR_SUB_FRAME(pdSchTmInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
30878 RGSCH_INCR_SUB_FRAME(pdSchTmInfo, RG_SCH_CMN_DL_DELTA);
30882 /* Updating the SIB1 for Warning SI message immediately after it is received
30883 * from application. No need to wait for next modification period.
30885 if((pdSchTmInfo.sfn % RGSCH_SIB1_RPT_PERIODICITY == 0)
30886 && (RGSCH_SIB1_TX_SF_NUM == (pdSchTmInfo.slot % RGSCH_NUM_SUB_FRAMES)))
30888 /*Check whether SIB1 with PWS has been updated*/
30889 if(cell->siCb.siBitMask & RGSCH_SI_SIB1_PWS_UPD)
30891 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.sib1Info.sib1,
30892 cell->siCb.newSiInfo.sib1Info.sib1);
30893 cell->siCb.crntSiInfo.sib1Info.mcs =
30894 cell->siCb.newSiInfo.sib1Info.mcs;
30895 cell->siCb.crntSiInfo.sib1Info.nPrb =
30896 cell->siCb.newSiInfo.sib1Info.nPrb;
30897 cell->siCb.crntSiInfo.sib1Info.msgLen =
30898 cell->siCb.newSiInfo.sib1Info.msgLen;
30899 cell->siCb.siBitMask &= ~RGSCH_SI_SIB1_PWS_UPD;
30903 /*Check if this SFN and SF No marks the start of next modification
30904 period. If the current SFN, SF No doesn't mark the start of the next
30905 modification period, then return. */
30906 if(!((pdSchTmInfo.sfn % cell->siCfg.modPrd == 0)
30907 && (0 == pdSchTmInfo.slot)))
30908 /*if(!((((pdSchTmInfo.hSfn * 1024) + pdSchTmInfo.sfn) % cell->siCfg.modPrd == 0)
30909 && (0 == pdSchTmInfo.slot)))*/
30914 /*Check whether MIB has been updated*/
30915 if(cell->siCb.siBitMask & RGSCH_SI_MIB_UPD)
30917 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.mib,
30918 cell->siCb.newSiInfo.mib);
30919 cell->siCb.siBitMask &= ~RGSCH_SI_MIB_UPD;
30922 /*Check whether SIB1 has been updated*/
30923 if(cell->siCb.siBitMask & RGSCH_SI_SIB1_UPD)
30925 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.sib1Info.sib1,
30926 cell->siCb.newSiInfo.sib1Info.sib1);
30927 cell->siCb.crntSiInfo.sib1Info.mcs = cell->siCb.newSiInfo.sib1Info.mcs;
30928 cell->siCb.crntSiInfo.sib1Info.nPrb = cell->siCb.newSiInfo.sib1Info.nPrb;
30929 cell->siCb.crntSiInfo.sib1Info.msgLen =
30930 cell->siCb.newSiInfo.sib1Info.msgLen;
30931 cell->siCb.siBitMask &= ~RGSCH_SI_SIB1_UPD;
30934 /*Check whether SIs have been updated*/
30935 if(cell->siCb.siBitMask & RGSCH_SI_SI_UPD)
30939 /*Check if the SI cfg has been modified and if numSi has
30940 been changed; if yes, then we need to update the
30941 pointers for all the SIs */
30942 if((cell->siCb.siBitMask & RGSCH_SI_SICFG_UPD) &&
30943 (cell->siCfg.numSi != cell->siCb.newSiCfg.numSi))
30945 for(idx = 0;idx < cell->siCb.newSiCfg.numSi;idx++)
30947 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.siInfo[idx].si,
30948 cell->siCb.newSiInfo.siInfo[idx].si);
30949 cell->siCb.siArray[idx].si = cell->siCb.crntSiInfo.siInfo[idx].si;
30950 cell->siCb.siArray[idx].isWarningSi = FALSE;
30952 cell->siCb.crntSiInfo.siInfo[idx].mcs = cell->siCb.newSiInfo.siInfo[idx].mcs;
30953 cell->siCb.crntSiInfo.siInfo[idx].nPrb = cell->siCb.newSiInfo.siInfo[idx].nPrb;
30954 cell->siCb.crntSiInfo.siInfo[idx].msgLen = cell->siCb.newSiInfo.siInfo[idx].msgLen;
30957 /*If numSi has been reduced, then we need to free the
30958 pointers at the indexes in crntSiInfo which are no longer
30959 exercised. If numSi has increased, then nothing
30960 additional is required as the above handling has taken
30962 if(cell->siCfg.numSi > cell->siCb.newSiCfg.numSi)
30964 for(idx = cell->siCb.newSiCfg.numSi;
30965 idx < cell->siCfg.numSi;idx++)
30967 RGSCH_FREE_MSG(cell->siCb.crntSiInfo.siInfo[idx].si);
30968 cell->siCb.siArray[idx].si = NULLP;
30974 /*numSi has not been updated; we just need to update the
30975 pointers for the SIs which are set to non-NULLP */
30976 /*ccpu00118260 - Correct Update of SIB2 */
30977 for(idx = 0;idx < cell->siCfg.numSi;idx++)
30979 if(NULLP != cell->siCb.newSiInfo.siInfo[idx].si)
30981 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.siInfo[idx].si,
30982 cell->siCb.newSiInfo.siInfo[idx].si);
30984 cell->siCb.siArray[idx].si = cell->siCb.crntSiInfo.siInfo[idx].si;
30985 cell->siCb.siArray[idx].isWarningSi = FALSE;
30986 cell->siCb.crntSiInfo.siInfo[idx].mcs = cell->siCb.newSiInfo.siInfo[idx].mcs;
30987 cell->siCb.crntSiInfo.siInfo[idx].nPrb = cell->siCb.newSiInfo.siInfo[idx].nPrb;
30988 cell->siCb.crntSiInfo.siInfo[idx].msgLen = cell->siCb.newSiInfo.siInfo[idx].msgLen;
30992 cell->siCb.siBitMask &= ~RGSCH_SI_SI_UPD;
30995 /*Check whether SI cfg have been updated*/
30996 if(cell->siCb.siBitMask & RGSCH_SI_SICFG_UPD)
30998 cell->siCfg = cell->siCb.newSiCfg;
30999 cell->siCb.siBitMask &= ~RGSCH_SI_SICFG_UPD;
31007 * @brief This function implements the selection of the SI
31008 * that is to be scheduled.
31012 * Function: rgSCHSelectSi
31013 * Purpose: This function implements the selection of SI
31014 * that is to be scheduled.
31016 * Invoked by: Scheduler
31018 * @param[in] RgSchCellCb* cell
31024 PRIVATE Void rgSCHSelectSi
31029 PRIVATE Void rgSCHSelectSi(cell)
31033 CmLteTimingInfo crntTmInfo;
31038 TRC2(rgSCHSelectSi);
31041 crntTmInfo = cell->crntTime;
31042 #ifdef LTEMAC_HDFDD
31043 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
31044 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
31045 RGSCH_INCR_SUB_FRAME(crntTmInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
31047 RGSCH_INCR_SUB_FRAME(crntTmInfo, RG_SCH_CMN_DL_DELTA);
31050 siWinSize = cell->siCfg.siWinSize;
31052 /* Select SI only once at the start of a new window */
31053 if(cell->siCb.inWindow)
31055 if ((crntTmInfo.sfn % cell->siCfg.minPeriodicity) == 0 &&
31056 crntTmInfo.slot == 0)
31058 /* Reinit inWindow at the beginning of every SI window */
31059 cell->siCb.inWindow = siWinSize - 1;
31063 cell->siCb.inWindow--;
31067 else /* New window. Re-init the winSize counter with the window length */
31069 if((cell->siCb.siArray[cell->siCb.siCtx.siId - 1].isWarningSi == TRUE)&&
31070 (cell->siCb.siCtx.retxCntRem != 0))
31072 rgSCHUtlFreeWarningSiPdu(cell);
31073 cell->siCb.siCtx.warningSiFlag = FALSE;
31076 cell->siCb.inWindow = siWinSize - 1;
31079 x = rgSCHCmnGetSiSetId(crntTmInfo.sfn, crntTmInfo.slot,
31080 cell->siCfg.minPeriodicity);
31082 /* Window Id within a SI set. This window Id directly maps to a
31084 windowId = (((crntTmInfo.sfn * RGSCH_NUM_SUB_FRAMES_5G) +
31085 crntTmInfo.slot) - (x * (cell->siCfg.minPeriodicity * 10)))
31088 if(windowId >= RGR_MAX_NUM_SI)
31091 /* Update the siCtx if there is a valid SI and its periodicity
31093 if (NULLP != cell->siCb.siArray[windowId].si)
31095 /* Warning SI Periodicity is same as SIB2 Periodicity */
31096 if(((cell->siCb.siArray[windowId].isWarningSi == FALSE) &&
31097 (x % (cell->siCfg.siPeriodicity[windowId]
31098 /cell->siCfg.minPeriodicity) == 0)) ||
31099 ((cell->siCb.siArray[windowId].isWarningSi == TRUE) &&
31100 (x % (cell->siCfg.siPeriodicity[0]
31101 /cell->siCfg.minPeriodicity) == 0)))
31103 cell->siCb.siCtx.siId = windowId+1;
31104 cell->siCb.siCtx.retxCntRem = cell->siCfg.retxCnt;
31105 cell->siCb.siCtx.warningSiFlag = cell->siCb.siArray[windowId].
31107 cell->siCb.siCtx.timeToTx.sfn = crntTmInfo.sfn;
31108 cell->siCb.siCtx.timeToTx.slot = crntTmInfo.slot;
31110 RG_SCH_ADD_TO_CRNT_TIME(cell->siCb.siCtx.timeToTx,
31111 cell->siCb.siCtx.maxTimeToTx, (siWinSize - 1))
31115 {/* Update the siCtx with invalid si Id */
31116 cell->siCb.siCtx.siId = 0;
31124 * @brief This function implements scheduler DL allocation for
31129 * Function: rgSCHDlSiSched
31130 * Purpose: This function implements scheduler for DL allocation
31133 * Invoked by: Scheduler
31135 * @param[in] RgSchCellCb* cell
31141 PRIVATE Void rgSCHDlSiSched
31144 RgSchCmnDlRbAllocInfo *allocInfo,
31145 RgInfSfAlloc *subfrmAlloc
31148 PRIVATE Void rgSCHDlSiSched(cell, allocInfo, subfrmAlloc)
31150 RgSchCmnDlRbAllocInfo *allocInfo;
31151 RgInfSfAlloc *subfrmAlloc;
31154 CmLteTimingInfo crntTimInfo;
31160 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
31161 /* DwPTS Scheduling Changes Start */
31164 U8 cfi = cellDl->currCfi;
31166 /* DwPTS Scheduling Changes End */
31168 TRC2(rgSCHDlSiSched);
31171 crntTimInfo = cell->crntTime;
31172 #ifdef LTEMAC_HDFDD
31173 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
31174 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
31175 RGSCH_INCR_SUB_FRAME(crntTimInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
31177 RGSCH_INCR_SUB_FRAME(crntTimInfo, RG_SCH_CMN_DL_DELTA);
31180 /* Compute the subframe for which allocation is being made.
31181 Essentially, we need a pointer to the DL subframe structure for this subframe */
31182 sf = rgSCHUtlSubFrmGet(cell, crntTimInfo);
31184 /*Check if scheduling of MIB is required */
31186 /* Since MIB repetition logic is added for EMTC UEs, check whether
31187 * emtcEnable is set. If enabled, MIB is repeated as part of the EMTC
31188 * feature; otherwise it is scheduled at (n,0) */
31189 if(0 == cell->emtcEnable)
31192 if((crntTimInfo.sfn % RGSCH_MIB_PERIODICITY == 0)
31193 && (RGSCH_MIB_TX_SF_NUM == crntTimInfo.slot))
31196 U8 sfnOctet, mibOct2 = 0;
31198 /*If MIB has not yet been set up by the Application, return*/
31199 if(NULLP == cell->siCb.crntSiInfo.mib)
31202 SFndLenMsg(cell->siCb.crntSiInfo.mib, &mibLen);
31203 sf->bch.tbSize = mibLen;
31204 /*Fill the interface information */
31205 rgSCHUtlFillRgInfCmnLcInfo(sf, subfrmAlloc, NULLD, NULLD);
31207 /*Set the bits of MIB to reflect SFN */
31208 /*First get the most significant 8 bits of SFN */
31209 sfnOctet = (U8)(crntTimInfo.sfn >> 2);
31210 /*Get the first two octets of MIB, and then update them
31211 using the SFN octet value obtained above.*/
31212 if(ROK != SExamMsg((Data *)(&mibOct1),
31213 cell->siCb.crntSiInfo.mib, 0))
31216 if(ROK != SExamMsg((Data *)(&mibOct2),
31217 cell->siCb.crntSiInfo.mib, 1))
31220 /* ccpu00114572- Fix for improper way of MIB Octet setting for SFN */
31221 mibOct1 = (mibOct1 & 0xFC) | (sfnOctet >> 6);
31222 mibOct2 = (mibOct2 & 0x03) | (sfnOctet << 2);
31223 /* ccpu00114572- Fix ends*/
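/* Illustrative example (hypothetical SFN): for sfn = 341 (0x155),
 * sfnOctet = 341 >> 2 = 0x55; the two MSBs of sfnOctet land in the low
 * bits of MIB octet 0 ((mibOct1 & 0xFC) | 0x01) and the remaining six
 * bits in the high bits of MIB octet 1 ((mibOct2 & 0x03) | 0x54). */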
31225 /*Now, replace the two octets in MIB */
31226 if(ROK != SRepMsg((Data)(mibOct1),
31227 cell->siCb.crntSiInfo.mib, 0))
31230 if(ROK != SRepMsg((Data)(mibOct2),
31231 cell->siCb.crntSiInfo.mib, 1))
31234 /*Copy the MIB msg buff into interface buffer */
31235 SCpyMsgMsg(cell->siCb.crntSiInfo.mib,
31236 rgSchCb[cell->instIdx].rgSchInit.region,
31237 rgSchCb[cell->instIdx].rgSchInit.pool,
31238 &subfrmAlloc->cmnLcInfo.bchInfo.pdu);
31239 /* Added Dl TB count for MIB message transmission
31240 * This counter is incremented 4 times to consider
31241 * the retransmission at the PHY level on PBCH channel*/
31243 cell->dlUlTbCnt.tbTransDlTotalCnt += RG_SCH_MIB_CNT;
31250 allocInfo->bcchAlloc.schdFirst = FALSE;
31251 /*Check if scheduling of SIB1 is required.
31252 Check of (crntTimInfo.sfn % RGSCH_SIB1_PERIODICITY == 0)
31253 is not required here since the below check takes care
31254 of SFNs applicable for this one too.*/
31255 if((crntTimInfo.sfn % RGSCH_SIB1_RPT_PERIODICITY == 0)
31256 && (RGSCH_SIB1_TX_SF_NUM == crntTimInfo.slot))
31258 /*If SIB1 has not yet been set up by the Application, return*/
31259 if(NULLP == (cell->siCb.crntSiInfo.sib1Info.sib1))
31264 allocInfo->bcchAlloc.schdFirst = TRUE;
31265 mcs = cell->siCb.crntSiInfo.sib1Info.mcs;
31266 nPrb = cell->siCb.crntSiInfo.sib1Info.nPrb;
31267 msgLen = cell->siCb.crntSiInfo.sib1Info.msgLen;
31271 /*Check if scheduling of SI can be performed.*/
31272 Bool invalid = FALSE;
31274 if(cell->siCb.siCtx.siId == 0)
31277 /*Check if the Si-Window for the current Si-Context is completed*/
31278 invalid = rgSCHCmnChkPastWin(crntTimInfo, cell->siCb.siCtx.maxTimeToTx);
31281 /* LTE_ADV_FLAG_REMOVED_START */
31282 if(cell->siCb.siCtx.retxCntRem)
31284 RGSCHLOGERROR(cell->instIdx,ERRCLS_INT_PAR,ERG011,(ErrVal)cell->siCb.siCtx.siId,
31285 "rgSCHDlSiSched(): SI not scheduled and window expired");
31287 /* LTE_ADV_FLAG_REMOVED_END */
31288 if(cell->siCb.siCtx.warningSiFlag == TRUE)
31290 rgSCHUtlFreeWarningSiPdu(cell);
31291 cell->siCb.siCtx.warningSiFlag = FALSE;
31296 /*Check the timing info of the current SI-Context to see if its
31297 transmission can be scheduled. */
31298 if(FALSE == (rgSCHCmnChkInWin(crntTimInfo,
31299 cell->siCb.siCtx.timeToTx,
31300 cell->siCb.siCtx.maxTimeToTx)))
31305 /*Check if retransmission count has become 0*/
31306 if(0 == cell->siCb.siCtx.retxCntRem)
31311 /* LTE_ADV_FLAG_REMOVED_START */
31312 /* Check if ABS is enabled/configured */
31313 if(RGR_ENABLE == cell->lteAdvCb.absCfg.status)
31315 /* If the pattern type is RGR_ABS_MUTE, the eNB needs to blank the subframe */
31316 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
31318 /* Determine next scheduling subframe is ABS or not */
31319 if(RG_SCH_ABS_ENABLED_ABS_SF == (RgSchAbsSfEnum)(cell->lteAdvCb.absCfg.absPattern
31320 [((crntTimInfo.sfn*RGSCH_NUM_SUB_FRAMES) + crntTimInfo.slot) % RGR_ABS_PATTERN_LEN]))
31322 /* Skip the SI scheduling to next tti */
31327 /* LTE_ADV_FLAG_REMOVED_END */
31329 /*Schedule the transmission of the current SI-Context */
31330 /*Find out the message length for the SI message */
31331 /* warningSiFlag is to differentiate between Warning SI
31333 if((rgSCHUtlGetMcsAndNPrb(cell, &nPrb, &mcs, &msgLen)) != ROK)
31338 cell->siCb.siCtx.i = RGSCH_CALC_SF_DIFF(crntTimInfo,
31339 cell->siCb.siCtx.timeToTx);
31343 /*Get the number of rb required */
31344 /*rgSCHCmnClcRbAllocForFxdTb(cell, msgLen, cellDl->ccchCqi, &rb);*/
31345 if(cellDl->bitsPerRb==0)
31347 while ((rgTbSzTbl[0][0][rb]) < (U32) (msgLen*8))
31355 rb = RGSCH_CEIL((msgLen*8), cellDl->bitsPerRb);
31357 /* DwPTS Scheduling Changes Start */
31359 if (sf->sfType == RG_SCH_SPL_SF_DATA)
31361 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
31363 /* Calculate the REs lost because of DwPTS */
31364 lostRe = rb * (cellDl->noResPerRb[cfi] - cellDl->numReDwPts[cfi]);
31366 /* Increase number of RBs in Spl SF to compensate for lost REs */
31367 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
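/* Illustrative example (hypothetical values): with rb = 10,
 * noResPerRb[cfi] = 138 and numReDwPts[cfi] = 100, lostRe =
 * 10 * (138 - 100) = 380 REs, so rb grows by ceil(380/100) = 4 RBs to
 * keep the SI TB size achievable in the DwPTS region. */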
31370 /* DwPTS Scheduling Changes End */
31371 /*ccpu00115595- end*/
31372 /* Additional check to see if the required RBs
31373 * exceed the available BW */
31374 if (rb > sf->bw - sf->bwAssigned)
31376 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHDlSiSched(): "
31377 "BW allocation failed CRNTI:%d",RGSCH_SI_RNTI);
31381 /* Update the subframe Allocated BW field */
31382 sf->bwAssigned = sf->bwAssigned + rb;
31384 /*Fill the parameters in allocInfo */
31385 allocInfo->bcchAlloc.rnti = RGSCH_SI_RNTI;
31386 allocInfo->bcchAlloc.dlSf = sf;
31387 allocInfo->bcchAlloc.rbsReq = rb;
31388 /*ccpu00116710- MCS is not getting assigned */
31389 allocInfo->bcchAlloc.tbInfo[0].imcs = mcs;
31391 /* ccpu00117510 - ADD - Assignment of nPrb and other information */
31392 allocInfo->bcchAlloc.nPrb = nPrb;
31393 allocInfo->bcchAlloc.tbInfo[0].bytesReq = msgLen;
31394 allocInfo->bcchAlloc.tbInfo[0].noLyr = 1;
31397 #endif /*RGR_SI_SCH*/
31400 /* ccpu00117452 - MOD - Changed macro name from
31401 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
31402 #ifdef RGR_CQI_REPT
31404 * @brief This function Updates the DL CQI for the UE.
31408 * Function: rgSCHCmnUeDlPwrCtColltCqiRept
31409 * Purpose: Manages PUSH N CQI reporting
31410 * Step 1: Store the CQI in collation array
31411 * Step 2: Increment the tracking count
31412 * Step 3: Check if it is time to send the report
31413 * Step 4: If yes, send StaInd to RRM
31414 * Step 4.1: Fill StaInd for sending collated N CQI reports
31415 * Step 4.2: Call utility function (rgSCHUtlRgrStaInd) to send reports to RRM
31416 * Step 4.2.1: If sending was not successful, return RFAILED
31417 * Step 4.2.2: If sending was successful, return ROK
31418 * Step 5: If no, return
31419 * Invoked by: rgSCHCmnDlCqiInd
31421 * @param[in] RgSchCellCb *cell
31422 * @param[in] RgSchUeCb *ue
31423 * @param[in] RgrUeCqiRept *ueCqiRpt
31428 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept
31432 RgrUeCqiRept *ueCqiRpt
31435 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept(cell, ue, ueCqiRpt)
31438 RgrUeCqiRept *ueCqiRpt;
31441 U8 *cqiCount = NULLP;
31443 RgrStaIndInfo *staInfo = NULLP;
31445 TRC2(rgSCHCmnUeDlPwrCtColltCqiRept)
31447 /* Step 1: Store the CQI in collation array */
31448 /* Step 2: Increment the tracking count */
31449 cqiCount = &(ue->schCqiInfo.cqiCount);
31450 ue->schCqiInfo.cqiRept[(*cqiCount)++] =
31454 /* Step 3: Check if it is time to send the report */
31455 if(RG_SCH_CQIR_IS_TIMTOSEND_CQIREPT(ue))
31457 /* Step 4: if yes, Send StaInd to RRM */
31458 retVal = rgSCHUtlAllocSBuf (cell->instIdx,(Data**)&staInfo,
31459 sizeof(RgrStaIndInfo));
31462 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not "
31463 "allocate memory for sending StaInd CRNTI:%d",ue->ueId);
31467 /* Step 4.1: Fill StaInd for sending collated N CQI reports */
31470 extern U32 gCqiReptToAppCount;
31471 gCqiReptToAppCount++;
31476 retVal = rgSCHUtlFillSndStaInd(cell, ue, staInfo,
31477 ue->cqiReptCfgInfo.numColltdCqiRept);
31483 } /* End of rgSCHCmnUeDlPwrCtColltCqiRept */
31485 #endif /* End of RGR_CQI_REPT */
31488 * @brief This function checks for the retransmission
31489 * for a DTX scenario.
31496 * @param[in] RgSchCellCb *cell
31497 * @param[in] RgSchUeCb *ue
31503 PUBLIC Void rgSCHCmnChkRetxAllowDtx
31507 RgSchDlHqProcCb *proc,
31511 PUBLIC Void rgSCHCmnChkRetxAllowDtx(cell, ueCb, proc, reTxAllwd)
31514 RgSchDlHqProcCb *proc;
31518 TRC3(rgSCHCmnChkRetxAllowDtx)
31523 if ((proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX))
31525 *reTxAllwd = FALSE;
31532 * @brief API for calculating the SI Set Id
31536 * Function: rgSCHCmnGetSiSetId
31538 * This API is used for calculating the SI Set Id, as shown below
31540 * siSetId = 0 siSetId = 1
31541 * |******************|******************|---------------->
31542 * (0,0) (8,0) (16,0) (SFN, SF)
31545 * @param[in] U16 sfn
31547 * @return U16 siSetId
31550 PUBLIC U16 rgSCHCmnGetSiSetId
31557 PUBLIC U16 rgSCHCmnGetSiSetId(sfn, sf, minPeriodicity)
31560 U16 minPeriodicity;
31563 /* 80 is the minimum SI periodicity in sf. Also
31564 * all other SI periodicities are multiples of 80 */
31565 RETVALUE (((sfn * RGSCH_NUM_SUB_FRAMES_5G) + sf) / (minPeriodicity * 10));
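/* Illustrative example (assuming RGSCH_NUM_SUB_FRAMES_5G expands to 10,
 * consistent with the minPeriodicity * 10 denominator and the diagram
 * above): with minPeriodicity = 8, (sfn, sf) = (17, 5) gives
 * (17*10 + 5) / 80 = 2, i.e. the third SI set. */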
31569 * @brief API for calculating the DwPts Rb, Itbs and tbSz
31573 * Function: rgSCHCmnCalcDwPtsTbSz
31575 * @param[in] RgSchCellCb *cell
31576 * @param[in] U32 bo
31577 * @param[in/out] U8 *rb
31578 * @param[in/out] U8 *iTbs
31579 * @param[in] U8 lyr
31580 * @param[in] U8 cfi
31584 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz
31594 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz(cell, bo, rb, iTbs, lyr, cfi)
31604 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
31605 U32 numRE = *rb * cellDl->noResPerRb[cfi];
31606 U32 numDwPtsRb = RGSCH_CEIL(numRE, cellDl->numReDwPts[cfi]);
31608 TRC2(rgSCHCmnCalcDwPtsTbSz);
31610 /* DwPts Rb cannot exceed the cell Bw */
31611 numDwPtsRb = RGSCH_MIN(numDwPtsRb, cellDl->maxDlBwPerUe);
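/* Illustrative example (hypothetical values): for *rb = 10 normal-SF RBs
 * with noResPerRb[cfi] = 138 and numReDwPts[cfi] = 100, numRE = 1380 and
 * numDwPtsRb = ceil(1380/100) = 14 RBs, capped at maxDlBwPerUe. */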
31613 /* Adjust the iTbs for optimum usage of the DwPts region.
31614 * Using the same iTbs adjustment will not work for all
31615 * special subframe configurations and iTbs levels. Hence use the
31616 * static iTbs Delta table for adjusting the iTbs */
31617 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs);
31621 while(rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1] < bo*8 &&
31622 numDwPtsRb < cellDl->maxDlBwPerUe)
31627 tbSz = rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1];
31631 tbSz = rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1];
31639 * @brief API for calculating the DwPts Rb, Itbs and tbSz
31643 * Function: rgSCHCmnCalcDwPtsTbSz2Cw
31645 * @param[in] RgSchCellCb *cell
31646 * @param[in] U32 bo
31647 * @param[in/out] U8 *rb
31648 * @param[in] U8 maxRb
31649 * @param[in/out] U8 *iTbs1
31650 * @param[in/out] U8 *iTbs2
31651 * @param[in] U8 lyr1
31652 * @param[in] U8 lyr2
31653  * @param[out] U32 *tb1Sz
31654  * @param[out] U32 *tb2Sz
31655 * @param[in] U8 cfi
31658 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw
31673 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw(cell, bo, rb, maxRb, iTbs1, iTbs2,
31674 lyr1, lyr2, tb1Sz, tb2Sz, cfi)
31688 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
31689 U32 numRE = *rb * cellDl->noResPerRb[cfi];
31690 U32 numDwPtsRb = RGSCH_CEIL(numRE, cellDl->numReDwPts[cfi]);
31692 TRC2(rgSCHCmnCalcDwPtsTbSz2Cw);
31694 /* DwPts Rb cannot exceed the cell Bw */
31695 numDwPtsRb = RGSCH_MIN(numDwPtsRb, maxRb);
31697 /* Adjust the iTbs for optimum usage of the DwPts region.
31698 * Using the same iTbs adjustment will not work for all
31699 * special subframe configurations and iTbs levels. Hence use the
31700 * static iTbs Delta table for adjusting the iTbs */
31701 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs1);
31702 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs2);
31704 while((rgTbSzTbl[lyr1-1][*iTbs1][RGSCH_MAX(numDwPtsRb*3/4,1)-1] +
31705 rgTbSzTbl[lyr2-1][*iTbs2][RGSCH_MAX(numDwPtsRb*3/4,1)-1])< bo*8 &&
31706 numDwPtsRb < maxRb)
31711 *tb1Sz = rgTbSzTbl[lyr1-1][*iTbs1][RGSCH_MAX(numDwPtsRb*3/4,1)-1]/8;
31712 *tb2Sz = rgTbSzTbl[lyr2-1][*iTbs2][RGSCH_MAX(numDwPtsRb*3/4,1)-1]/8;
31722 * @brief Updates the GBR LCGs when datInd is received from MAC
31726 * Function: rgSCHCmnUpdUeDataIndLcg(cell, ue, datInd)
31727 * Purpose: This function updates the GBR LCGs
31728 * when datInd is received from MAC.
31732 * @param[in] RgSchCellCb *cell
31733 * @param[in] RgSchUeCb *ue
31734 * @param[in] RgInfUeDatInd *datInd
31738 PUBLIC Void rgSCHCmnUpdUeDataIndLcg
31742 RgInfUeDatInd *datInd
31745 PUBLIC Void rgSCHCmnUpdUeDataIndLcg(cell, ue, datInd)
31748 RgInfUeDatInd *datInd;
31752 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
31754 Inst inst = cell->instIdx;
31757 TRC2(rgSCHCmnUpdUeDataIndLcg);
31759 for (idx = 0; (idx < RGINF_MAX_LCG_PER_UE - 1); idx++)
31761 if (datInd->lcgInfo[idx].bytesRcvd != 0)
31763 U8 lcgId = datInd->lcgInfo[idx].lcgId;
31764 U32 bytesRcvd = datInd->lcgInfo[idx].bytesRcvd;
31766 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
31768 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
31769 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
31771 if(bytesRcvd > cmnLcg->effGbr)
31773 bytesRcvd -= cmnLcg->effGbr;
31774 cmnLcg->effDeltaMbr = (cmnLcg->effDeltaMbr > bytesRcvd) ? \
31775 (cmnLcg->effDeltaMbr - bytesRcvd) : (0);
31776 cmnLcg->effGbr = 0;
31780 cmnLcg->effGbr -= bytesRcvd;
31782 /* To keep BS updated with the amount of data received for the GBR */
31783 cmnLcg->reportedBs = (cmnLcg->reportedBs > datInd->lcgInfo[idx].bytesRcvd) ? \
31784 (cmnLcg->reportedBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31785 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr+cmnLcg->effDeltaMbr);
31787 else if(lcgId != 0)
31789 ue->ul.effAmbr = (ue->ul.effAmbr > datInd->lcgInfo[idx].bytesRcvd) ? \
31790 (ue->ul.effAmbr - datInd->lcgInfo[idx].bytesRcvd) : (0);
31791 cmnLcg->reportedBs = (cmnLcg->reportedBs > datInd->lcgInfo[idx].bytesRcvd) ? \
31792 (cmnLcg->reportedBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31793 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
31794 ue->ul.nonGbrLcgBs = (ue->ul.nonGbrLcgBs > datInd->lcgInfo[idx].bytesRcvd) ? \
31795 (ue->ul.nonGbrLcgBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31797 ue->ul.nonLcg0Bs = (ue->ul.nonLcg0Bs > datInd->lcgInfo[idx].bytesRcvd) ? \
31798 (ue->ul.nonLcg0Bs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31807 if(TRUE == ue->isEmtcUe)
31809 if (cellSch->apisEmtcUl->rgSCHRgrUlLcgUpd(cell, ue, datInd) != ROK)
31811 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "\n rgSCHCmnUpdUeDataIndLcg(): rgSCHRgrUlLcgUpd returned failure"));
31818 if (cellSch->apisUl->rgSCHRgrUlLcgUpd(cell, ue, datInd) != ROK)
31820 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "\n rgSCHCmnUpdUeDataIndLcg(): rgSCHRgrUlLcgUpd returned failure"));
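/* Minimal sketch of the GBR quota accounting above (hypothetical helper,
 * kept out of the build): received bytes are charged against the remaining
 * GBR quota first and any excess against the delta-MBR quota, with both
 * quotas clamped at zero. */
#if 0
static Void updGbrQuota(U32 bytesRcvd, U32 *effGbr, U32 *effDeltaMbr)
{
   if (bytesRcvd > *effGbr)
   {
      bytesRcvd    -= *effGbr;
      *effGbr       = 0;
      *effDeltaMbr  = (*effDeltaMbr > bytesRcvd) ? (*effDeltaMbr - bytesRcvd) : 0;
   }
   else
   {
      *effGbr      -= bytesRcvd;
   }
}
#endif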
31826 /** @brief This function initializes DL allocation lists and prepares
31831 * Function: rgSCHCmnInitRbAlloc
31833 * @param [in] RgSchCellCb *cell
31839 PRIVATE Void rgSCHCmnInitRbAlloc
31844 PRIVATE Void rgSCHCmnInitRbAlloc (cell)
31848 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
31849 CmLteTimingInfo frm;
31853 TRC2(rgSCHCmnInitRbAlloc);
31855    /* Initializing the RgSchCmnDlRbAllocInfo structure. */
31856 rgSCHCmnInitDlRbAllocInfo(&cellSch->allocInfo);
31858 frm = cellSch->dl.time;
31860 dlSf = rgSCHUtlSubFrmGet(cell, frm);
31862 dlSf->numGrpPerTti = cell->cell5gtfCb.ueGrpPerTti;
31863 dlSf->numUePerGrp = cell->cell5gtfCb.uePerGrpPerTti;
31864 for(idx = 0; idx < MAX_5GTF_BEAMS; idx++)
31866 dlSf->sfBeamInfo[idx].totVrbgAllocated = 0;
31867 dlSf->sfBeamInfo[idx].totVrbgRequired = 0;
31868 dlSf->sfBeamInfo[idx].vrbgStart = 0;
31871 dlSf->remUeCnt = cellSch->dl.maxUePerDlSf;
31872 /* Updating the Subframe information in RBAllocInfo */
31873 cellSch->allocInfo.dedAlloc.dedDlSf = dlSf;
31874 cellSch->allocInfo.msg4Alloc.msg4DlSf = dlSf;
31876 /* LTE_ADV_FLAG_REMOVED_START */
31877    /* Determine whether the next scheduling subframe is an ABS subframe */
31878 if(RGR_ENABLE == cell->lteAdvCb.absCfg.status)
31880 cell->lteAdvCb.absPatternDlIdx =
31881 ((frm.sfn*RGSCH_NUM_SUB_FRAMES_5G) + frm.slot) % RGR_ABS_PATTERN_LEN;
31882 cell->lteAdvCb.absDlSfInfo = (RgSchAbsSfEnum)(cell->lteAdvCb.absCfg.absPattern[
31883 cell->lteAdvCb.absPatternDlIdx]);
31888 cell->lteAdvCb.absDlSfInfo = RG_SCH_ABS_DISABLED;
31890 /* LTE_ADV_FLAG_REMOVED_END */
31893 cellSch->allocInfo.ccchSduAlloc.ccchSduDlSf = dlSf;
31896 /* Update subframe-wide allocation information with SPS allocation */
31897 rgSCHCmnSpsDlUpdDlSfAllocWithSps(cell, frm, dlSf);
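/* Worked example for the ABS pattern index computed above (a sketch,
 * assuming 10 subframes per frame and a pattern length of 40): for
 * (sfn, slot) = (13, 7),
 *    absPatternDlIdx = ((13 * 10) + 7) % 40 = 137 % 40 = 17
 * so absDlSfInfo is taken from absCfg.absPattern[17] for that subframe. */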
31906  * @brief Sends the TX mode change indication to RRM
31911 * Function: rgSCHCmnSendTxModeInd(cell, ueUl, newTxMode)
31912 * Purpose: This function sends the TX mode Change
31913 * indication to RRM
31918 * @param[in] RgSchCellCb *cell
31919 * @param[in] RgSchUeCb *ue
31920 * @param[in] U8 newTxMode
31924 PRIVATE Void rgSCHCmnSendTxModeInd
31931 PRIVATE Void rgSCHCmnSendTxModeInd(cell, ue, newTxMode)
31937 RgmTransModeInd *txModeChgInd;
31938 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
31940 TRC2(rgSCHCmnSendTxModeInd);
31942 if(!(ueDl->mimoInfo.forceTD & RG_SCH_CMN_TD_TXMODE_RECFG))
31945 if(SGetSBuf(cell->rgmSap->sapCfg.sapPst.region,
31946 cell->rgmSap->sapCfg.sapPst.pool, (Data**)&txModeChgInd,
31947 sizeof(RgmTransModeInd)) != ROK)
31951 RG_SCH_FILL_RGM_TRANSMODE_IND(ue->ueId, cell->cellId, newTxMode, txModeChgInd);
31952 RgUiRgmChangeTransModeInd(&(cell->rgmSap->sapCfg.sapPst),
31953 cell->rgmSap->sapCfg.suId, txModeChgInd);
31956 ue->mimoInfo.txModUpChgFactor = 0;
31957 ue->mimoInfo.txModDownChgFactor = 0;
31958 ueDl->laCb[0].deltaiTbs = 0;
31964  * @brief Checks and updates the TM mode change threshold based on cqiiTbs and
31969 * Function: rgSchCheckAndTriggerModeChange(cell, ueUl, iTbsNew)
31970  *     Purpose:  This function updates and checks the threshold for TM mode
31975 * @param[in] RgSchCellCb *cell
31976 * @param[in] RgSchUeCb *ue
31977 * @param[in] U8 iTbs
31981 PUBLIC Void rgSchCheckAndTriggerModeChange
31990 PUBLIC Void rgSchCheckAndTriggerModeChange(cell, ue, reportediTbs, previTbs, maxiTbs)
31998 RgrTxMode txMode; /*!< UE's Transmission Mode */
31999 RgrTxMode modTxMode; /*!< UE's Transmission Mode */
32001 TRC2(rgSchCheckAndTriggerModeChange);
32003 txMode = ue->mimoInfo.txMode;
32005 /* Check for Step down */
32006 /* Step down only when TM4 is configured. */
32007 if(RGR_UE_TM_4 == txMode)
32009 if((previTbs <= reportediTbs) && ((reportediTbs - previTbs) >= RG_SCH_MODE_CHNG_STEPDOWN_CHECK_FACTOR))
32011 ue->mimoInfo.txModDownChgFactor += RG_SCH_MODE_CHNG_STEPUP_FACTOR;
32015 ue->mimoInfo.txModDownChgFactor -= RG_SCH_MODE_CHNG_STEPDOWN_FACTOR;
32018 ue->mimoInfo.txModDownChgFactor =
32019 RGSCH_MAX(ue->mimoInfo.txModDownChgFactor, -(RG_SCH_MODE_CHNG_STEPDOWN_THRSHD));
32021 if(ue->mimoInfo.txModDownChgFactor >= RG_SCH_MODE_CHNG_STEPDOWN_THRSHD)
32023 /* Trigger Mode step down */
32024 modTxMode = RGR_UE_TM_3;
32025 rgSCHCmnSendTxModeInd(cell, ue, modTxMode);
32029    /* Check for step up */
32030    /* Step up only when TM3 is configured; the max possible mode is TM4 */
32031 if(RGR_UE_TM_3 == txMode)
32033 if((previTbs > reportediTbs) || (maxiTbs == previTbs))
32035 ue->mimoInfo.txModUpChgFactor += RG_SCH_MODE_CHNG_STEPUP_FACTOR;
32039 ue->mimoInfo.txModUpChgFactor -= RG_SCH_MODE_CHNG_STEPDOWN_FACTOR;
32042 ue->mimoInfo.txModUpChgFactor =
32043 RGSCH_MAX(ue->mimoInfo.txModUpChgFactor, -(RG_SCH_MODE_CHNG_STEPUP_THRSHD));
32045       /* Check if TM step up needs to be triggered */
32046 if(ue->mimoInfo.txModUpChgFactor >= RG_SCH_MODE_CHNG_STEPUP_THRSHD)
32048          /* Trigger mode change */
32049 modTxMode = RGR_UE_TM_4;
32050 rgSCHCmnSendTxModeInd(cell, ue, modTxMode);
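/* Illustrative behaviour of the hysteresis above (hypothetical values:
 * step-up factor 1, step-down factor 1, threshold 10): starting from 0,
 * ten consecutive reports satisfying the step-up condition raise
 * txModUpChgFactor to 10 and trigger the TM3 -> TM4 indication, while any
 * intervening report that fails the condition pulls the counter back by
 * the step-down factor. Because the counter is floored at -threshold, a
 * long spell of poor reports cannot indefinitely delay a later step up. */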
32059  * @brief Checks whether CSG UEs have priority in the DL at the current time
32063 * Function: rgSCHCmnIsDlCsgPrio (cell)
32064  *     Purpose:  This function returns whether CSG UEs
32065  *               have priority at the current time
32067 * Invoked by: Scheduler
32069  *  @param[in] RgSchCellCb  *cell
32075 PUBLIC Bool rgSCHCmnIsDlCsgPrio
32080 PUBLIC Bool rgSCHCmnIsDlCsgPrio(cell)
32085 RgSchCmnDlCell *cmnDlCell = RG_SCH_CMN_GET_DL_CELL(cell);
32087 TRC2(rgSCHCmnIsDlCsgPrio)
32088 /* Calculating the percentage resource allocated */
32089 if(RGR_CELL_ACCS_HYBRID != rgSchCb[cell->instIdx].rgrSchedEnbCfg.accsMode)
32095 if(((cmnDlCell->ncsgPrbCnt * 100) / cmnDlCell->totPrbCnt) < cell->minDlResNonCsg)
32107  * @brief Checks whether CSG UEs have priority in the UL at the current time
32111 * Function: rgSCHCmnIsUlCsgPrio (cell)
32112  *     Purpose:  This function returns whether CSG UEs
32113  *               have priority at the current time
32115 * Invoked by: Scheduler
32117  *  @param[in] RgSchCellCb  *cell
32123 PUBLIC Bool rgSCHCmnIsUlCsgPrio
32128 PUBLIC Bool rgSCHCmnIsUlCsgPrio(cell)
32132 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
32134 TRC2(rgSCHCmnIsUlCsgPrio)
32136 /* Calculating the percentage resource allocated */
32137 if(RGR_CELL_ACCS_HYBRID != rgSchCb[cell->instIdx].rgrSchedEnbCfg.accsMode)
32143 if (((cmnUlCell->ncsgPrbCnt * 100) /cmnUlCell->totPrbCnt) < cell->minUlResNonCsg)
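/* Worked example for the share check above (hypothetical counts): with
 * ncsgPrbCnt = 200 PRBs granted to non-CSG UEs out of totPrbCnt = 1000,
 * the non-CSG share is (200 * 100) / 1000 = 20%. If minUlResNonCsg is
 * configured as 30, that 20% share is below the reserved minimum and the
 * branch above is taken; otherwise the CSG members may be prioritised. */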
32154 /** @brief Performs DL pre-scheduling and orders the cells for scheduling
32158 * Function: rgSchCmnPreDlSch
32160 * @param [in] Inst schInst;
32165 PUBLIC Void rgSchCmnPreDlSch
32167 RgSchCellCb **cell,
32169 RgSchCellCb **cellLst
32172 PUBLIC Void rgSchCmnPreDlSch(cell, nCell, cellLst)
32173 RgSchCellCb **cell;
32175 RgSchCellCb **cellLst;
32178 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell[0]);
32182 TRC2(rgSchCmnPreDlSch);
32184 if(nCell > CM_LTE_MAX_CELLS)
32189 if (cell[0]->isDlDataAllwd && (cell[0]->stopDlSch == FALSE))
32191 /* Specific DL scheduler to perform UE scheduling */
32192 cellSch->apisDl->rgSCHDlPreSched(cell[0]);
32194    /* Rearrange the cell entries based on their remUeCnt in the SF:
32195     * the cell whose subframe has the fewest remaining UE slots is processed first. */
32197 for (idx = 0; idx < nCell; idx++)
32200 cellSch = RG_SCH_CMN_GET_CELL(cell[idx]);
32201 sf = cellSch->allocInfo.dedAlloc.dedDlSf;
32205 cellLst[idx] = cell[idx];
32209 for(j = 0; j < idx; j++)
32211 RgSchCmnCell *cmnCell = RG_SCH_CMN_GET_CELL(cellLst[j]);
32212 RgSchDlSf *subfrm = cmnCell->allocInfo.dedAlloc.dedDlSf;
32214 if(sf->remUeCnt < subfrm->remUeCnt)
32217 for(k = idx; k > j; k--)
32219 cellLst[k] = cellLst[k-1];
32224 cellLst[j] = cell[idx];
32229 for (idx = 0; idx < nCell; idx++)
32231 cellLst[idx] = cell[idx];
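/* Minimal sketch of the cell ordering above (hypothetical standalone helper,
 * kept out of the build): the same insertion-sort pattern applied to plain
 * counters, so the cell whose DL subframe has the fewest remaining UE slots
 * ends up first in the output order. */
#if 0
static Void orderByRemUeCnt(U8 remUeCnt[], U8 order[], U8 nCell)
{
   U8 idx, j, k;
   for (idx = 0; idx < nCell; idx++)
   {
      order[idx] = idx;                    /* append at the end by default */
      for (j = 0; j < idx; j++)
      {
         if (remUeCnt[idx] < remUeCnt[order[j]])
         {
            for (k = idx; k > j; k--)      /* shift larger entries right */
            {
               order[k] = order[k-1];
            }
            order[j] = idx;                /* insert before the larger entry */
            break;
         }
      }
   }
}
#endif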
32237 /** @brief Performs DL post-scheduling processing for a cell
32240 * Function: rgSchCmnPstDlSch
32242 * @param [in] Inst schInst;
32247 PUBLIC Void rgSchCmnPstDlSch
32252 PUBLIC Void rgSchCmnPstDlSch(cell)
32256 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
32258 TRC2(rgSchCmnPstDlSch);
32260 if (cell->isDlDataAllwd && (cell->stopDlSch == FALSE))
32262 cellSch->apisDl->rgSCHDlPstSched(cell->instIdx);
32267 PUBLIC U8 rgSCHCmnCalcPcqiBitSz
32273 PUBLIC U8 rgSCHCmnCalcPcqiBitSz(ueCb, numTxAnt)
32281 RgSchUePCqiCb *cqiCb = ueCb->nPCqiCb;
32283 TRC3(rgSCHCmnCalcPcqiBitSz);
32285 confRepMode = cqiCb->cqiCfg.cqiSetup.prdModeEnum;
32286 if((ueCb->mimoInfo.txMode != RGR_UE_TM_3) &&
32287 (ueCb->mimoInfo.txMode != RGR_UE_TM_4))
32293 ri = cqiCb->perRiVal;
32295 switch(confRepMode)
32297 case RGR_PRD_CQI_MOD10:
32303 case RGR_PRD_CQI_MOD11:
32316 else if(numTxAnt == 4)
32329             /* This is the single transmit antenna (numTxAnt == 1) case.
32330              * It is not applicable for Mode 1-1,
32331              * so set an invalid value. */
32337 case RGR_PRD_CQI_MOD20:
32345 pcqiSz = 4 + cqiCb->label;
32350 case RGR_PRD_CQI_MOD21:
32365 else if(numTxAnt == 4)
32378                /* This might be the single transmit antenna (numTxAnt == 1) case.
32379                 * For the Mode 2-1 wideband case only antenna port 2 or 4 is supported,
32380                 * so set an invalid value. */
32388 pcqiSz = 4 + cqiCb->label;
32392 pcqiSz = 7 + cqiCb->label;
32405 /** @brief DL scheduler for SPS, and all other downlink data
32409 * Function: rgSCHCmnDlSch
32411 * @param [in] RgSchCellCb *cell
32417 PUBLIC Void rgSCHCmnDlSch
32422 PUBLIC Void rgSCHCmnDlSch (cell)
32427 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
32429 RgSchDynTddCb *rgSchDynTddInfo = &(rgSchCb[cell->instIdx].rgSchDynTdd);
32433 TRC2(rgSCHCmnDlSch);
32435 dlSf = rgSCHUtlSubFrmGet(cell, cellSch->dl.time);
32437 if (rgSchDynTddInfo->isDynTddEnbld)
32439 RG_SCH_DYN_TDD_GET_SFIDX(dlCntrlSfIdx, rgSchDynTddInfo->crntDTddSfIdx,
32440 RG_SCH_CMN_DL_DELTA);
32441 if(RG_SCH_DYNTDD_DLC_ULD == rgSchDynTddInfo->sfInfo[dlCntrlSfIdx].sfType)
32443 if(1 == cell->cellId)
32445 ul5gtfsidDlAlreadyMarkUl++;
32447 printf("ul5gtfsidDlAlreadyMarkUl: %d, [sfn:sf] [%04d:%02d]\n",
32448 ul5gtfsidDlAlreadyMarkUl, cellSch->dl.time.sfn,
32449 cellSch->dl.time.slot);
32457 /* Specific DL scheduler to perform UE scheduling */
32458 cellSch->apisDl->rgSCHDlNewSched(cell, &cellSch->allocInfo);
32459 /* LTE_ADV_FLAG_REMOVED_END */
32461 /* call common allocator for RB Allocation */
32462 rgSCHCmnDlRbAlloc(cell, &cellSch->allocInfo);
32464    /* Finalize the allocations: requested versus allocated */
32465 rgSCHCmnDlAllocFnlz(cell);
32467 /* Perform Pdcch allocations for PDCCH Order Q.
32468 * As of now, giving this the least preference.
32469 * This func call could be moved above other allocations
32471 rgSCHCmnGenPdcchOrder(cell, dlSf);
32473 /* Do group power control for PUCCH */
32474 rgSCHCmnGrpPwrCntrlPucch(cell, dlSf);
32479 /**********************************************************************
32482 **********************************************************************/