phydm_iqk_8821c.c

  1. #include "mp_precomp.h"
  2. #include "../phydm_precomp.h"
  3. #if (RTL8821C_SUPPORT == 1)
  4. /*---------------------------Define Local Constant---------------------------*/
  5. static u32 dpk_result[DPK_BACKUP_REG_NUM_8821C];
  6. #define enable_8821c_dpk 1
  7. #define dpk_forcein_sram4 0
  8. /*---------------------------Define Local Constant---------------------------*/
  9. #if !(DM_ODM_SUPPORT_TYPE & ODM_AP)
  10. void do_iqk_8821c(
  11. void *p_dm_void,
  12. u8 delta_thermal_index,
  13. u8 thermal_value,
  14. u8 threshold
  15. )
  16. {
  17. struct PHY_DM_STRUCT *p_dm_odm = (struct PHY_DM_STRUCT *)p_dm_void;
  18. struct _ADAPTER *adapter = p_dm_odm->adapter;
  19. HAL_DATA_TYPE *p_hal_data = GET_HAL_DATA(adapter);
  20. odm_reset_iqk_result(p_dm_odm);
  21. p_dm_odm->rf_calibrate_info.thermal_value_iqk = thermal_value;
  22. phy_iq_calibrate_8821c(p_dm_odm, true);
  23. }
  24. #else
  25. /*Originally p_config->do_iqk was hooked to phy_iq_calibrate_8821c, but do_iqk_8821c and phy_iq_calibrate_8821c take different argument lists*/
  26. void do_iqk_8821c(
  27. void *p_dm_void,
  28. u8 delta_thermal_index,
  29. u8 thermal_value,
  30. u8 threshold
  31. )
  32. {
  33. struct PHY_DM_STRUCT *p_dm_odm = (struct PHY_DM_STRUCT *)p_dm_void;
  34. boolean is_recovery = (boolean) delta_thermal_index;
  35. phy_iq_calibrate_8821c(p_dm_odm, true);
  36. }
  37. #endif
  38. void do_dpk_8821c(
  39. void *p_dm_void,
  40. u8 delta_thermal_index,
  41. u8 thermal_value,
  42. u8 threshold
  43. )
  44. {
  45. struct PHY_DM_STRUCT *p_dm_odm = (struct PHY_DM_STRUCT *)p_dm_void;
  46. boolean is_recovery = (boolean) delta_thermal_index;
  47. phy_dp_calibrate_8821c(p_dm_odm, true);
  48. }
  49. void
  50. _iqk_check_coex_status(
  51. struct PHY_DM_STRUCT *p_dm_odm,
  52. boolean beforeK
  53. )
  54. {
  55. u8 u1b_tmp;
  56. u16 count = 0;
  57. u8 h2c_parameter;
  58. h2c_parameter = 1;
  59. if (beforeK) {
  60. u1b_tmp = odm_read_1byte(p_dm_odm, 0x49c);
  61. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]check 0x49c[0] = 0x%x before h2c 0x6d\n", u1b_tmp));
  62. RT_TRACE(COMP_COEX, DBG_LOUD, ("[IQK]check 0x49c[0] = 0x%x before h2c 0x6d\n", u1b_tmp));
  63. /*check if BT IQK is still in progress*/
  64. u1b_tmp = odm_read_1byte(p_dm_odm, 0x49c);
  65. while ((u1b_tmp & BIT(1)) && (count < 100)) {
  66. ODM_delay_ms(10);
  67. u1b_tmp = odm_read_1byte(p_dm_odm, 0x49c);
  68. count++;
  69. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]check 0x49c[1]=0x%x, count = %d\n", u1b_tmp, count));
  70. RT_TRACE(COMP_COEX, DBG_LOUD, ("[IQK]check 0x49c[1]=0x%x, count = %d\n", u1b_tmp, count));
  71. }
  72. #if 1
  73. odm_fill_h2c_cmd(p_dm_odm, ODM_H2C_WIFI_CALIBRATION, 1, &h2c_parameter);
  74. u1b_tmp = odm_read_1byte(p_dm_odm, 0x49c);
  75. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]check 0x49c[0] = 0x%x after h2c 0x6d\n", u1b_tmp));
  76. RT_TRACE(COMP_COEX, DBG_LOUD, ("[IQK]check 0x49c[0] = 0x%x after h2c 0x6d\n", u1b_tmp));
  77. u1b_tmp = odm_read_1byte(p_dm_odm, 0x49c);
  78. /*check if WL IQK is available from WL FW */
  79. while ((!(u1b_tmp & BIT(0))) && (count < 100)) {
  80. ODM_delay_ms(10);
  81. u1b_tmp = odm_read_1byte(p_dm_odm, 0x49c);
  82. count++;
  83. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]check 0x49c[1]=0x%x, count = %d\n", u1b_tmp, count));
  84. RT_TRACE(COMP_COEX, DBG_LOUD, ("[IQK]check 0x49c[1]=0x%x, count = %d\n", u1b_tmp, count));
  85. }
  86. if (count >= 100)
  87. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]Polling 0x49c to 1 for WiFi calibration H2C cmd FAIL! count(%d)\n", count));
  88. #endif
  89. } else
  90. odm_set_bb_reg(p_dm_odm, 0x49c, BIT(0), 0x0);
  91. }
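/*
 * Coexistence handshake used above (a summary of the register flow in this
 * function, not an authoritative spec):
 *   before K: wait for 0x49c[1] (BT IQK busy) to clear, send H2C 0x6d
 *             (ODM_H2C_WIFI_CALIBRATION) to request the calibration slot,
 *             then poll 0x49c[0] until FW grants WL IQK; both polls give up
 *             after roughly 100 * 10 ms.
 *   after K:  clear 0x49c[0] to release the slot.
 */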
  92. u32
  93. _iqk_indirect_read_reg(
  94. struct PHY_DM_STRUCT *p_dm_odm,
  95. u16 reg_addr
  96. )
  97. {
  98. u32 j = 0;
  99. /*issue the indirect read command, then wait for the ready bit before reading the data back*/
  100. odm_write_4byte(p_dm_odm, 0x1700, 0x800f0000 | reg_addr);
  101. do {
  102. j++;
  103. } while (((odm_read_1byte(p_dm_odm, 0x1703) & BIT(5)) == 0) && (j < 30000));
  104. return odm_read_4byte(p_dm_odm, 0x1708); /*get read data*/
  105. }
  106. void
  107. _iqk_indirect_write_reg(
  108. struct PHY_DM_STRUCT *p_dm_odm,
  109. u16 reg_addr,
  110. u32 bit_mask,
  111. u32 reg_value
  112. )
  113. {
  114. u32 val, i = 0, j = 0, bitpos = 0;
  115. if (bit_mask == 0x0)
  116. return;
  117. if (bit_mask == 0xffffffff) {
  118. odm_write_4byte(p_dm_odm, 0x1704, reg_value); /*put write data*/
  119. /*wait for ready bit before access 0x1700*/
  120. do {
  121. j++;
  122. } while (((odm_read_1byte(p_dm_odm, 0x1703) & BIT(5)) == 0) && (j < 30000));
  123. odm_write_4byte(p_dm_odm, 0x1700, 0xc00f0000 | reg_addr);
  124. } else {
  125. for (i = 0; i <= 31; i++) {
  126. if (((bit_mask >> i) & 0x1) == 0x1) {
  127. bitpos = i;
  128. break;
  129. }
  130. }
  131. /*read back register value before write*/
  132. val = _iqk_indirect_read_reg(p_dm_odm, reg_addr);
  133. val = (val & (~bit_mask)) | (reg_value << bitpos);
  134. odm_write_4byte(p_dm_odm, 0x1704, val); /*put write data*/
  135. /*wait for ready bit before access 0x1700*/
  136. do {
  137. j++;
  138. } while (((odm_read_1byte(p_dm_odm, 0x1703) & BIT(5)) == 0) && (j < 30000));
  139. odm_write_4byte(p_dm_odm, 0x1700, 0xc00f0000 | reg_addr);
  140. }
  141. }
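/*
 * Indirect MAC/BB access protocol as used by the two helpers above:
 *   0x1700 = command/address, 0x1703[5] = ready flag,
 *   0x1704 = write data,      0x1708 = read-back data.
 * A masked write is a read-modify-write: the lowest set bit of bit_mask gives
 * the shift applied to reg_value. For example, the GNT_WL/GNT_BT helpers below
 * call
 *   _iqk_indirect_write_reg(p_dm_odm, 0x38, 0x3000, 0x3);
 * which updates only 0x38[13:12].
 */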
  142. void
  143. _iqk_set_gnt_wl_high(
  144. struct PHY_DM_STRUCT *p_dm_odm
  145. )
  146. {
  147. u32 val = 0;
  148. u8 state = 0x1, sw_control = 0x1;
  149. /*GNT_WL = 1*/
  150. val = (sw_control) ? ((state << 1) | 0x1) : 0;
  151. _iqk_indirect_write_reg(p_dm_odm, 0x38, 0x3000, val); /*0x38[13:12]*/
  152. _iqk_indirect_write_reg(p_dm_odm, 0x38, 0x0300, val); /*0x38[9:8]*/
  153. }
  154. void _iqk_set_gnt_bt_low(
  155. struct PHY_DM_STRUCT *p_dm_odm
  156. )
  157. {
  158. u32 val = 0;
  159. u8 state = 0x0, sw_control = 0x1;
  160. /*GNT_BT = 0*/
  161. val = (sw_control) ? ((state << 1) | 0x1) : 0;
  162. _iqk_indirect_write_reg(p_dm_odm, 0x38, 0xc000, val); /*0x38[15:14]*/
  163. _iqk_indirect_write_reg(p_dm_odm, 0x38, 0x0c00, val); /*0x38[11:10]*/
  164. }
  165. void _iqk_set_gnt_wl_gnt_bt(
  166. struct PHY_DM_STRUCT *p_dm_odm,
  167. boolean beforeK
  168. )
  169. {
  170. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  171. if (beforeK) {
  172. _iqk_set_gnt_wl_high(p_dm_odm);
  173. _iqk_set_gnt_bt_low(p_dm_odm);
  174. } else
  175. _iqk_indirect_write_reg(p_dm_odm, 0x38, MASKDWORD, p_iqk_info->tmp_GNTWL);
  176. }
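/*
 * Each GNT field is two bits wide: bit0 enables software control, bit1 is the
 * forced state, so val = (sw_control) ? ((state << 1) | 0x1) : 0 yields
 * 0x3 (force high), 0x1 (force low) or 0x0 (return to HW control). The
 * original 0x38 value is restored from tmp_GNTWL once beforeK is false.
 */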
  177. void
  178. _iqk_fill_iqk_report_8821c(
  179. void *p_dm_void,
  180. u8 channel
  181. )
  182. {
  183. struct PHY_DM_STRUCT *p_dm_odm = (struct PHY_DM_STRUCT *)p_dm_void;
  184. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  185. u32 tmp1 = 0x0, tmp2 = 0x0, tmp3 = 0x0;
  186. u8 i;
  187. for (i = 0; i < SS_8821C; i++) {
  188. tmp1 = tmp1 + ((p_iqk_info->IQK_fail_report[channel][i][TX_IQK] & 0x1) << i);
  189. tmp2 = tmp2 + ((p_iqk_info->IQK_fail_report[channel][i][RX_IQK] & 0x1) << (i + 4));
  190. tmp3 = tmp3 + ((p_iqk_info->RXIQK_fail_code[channel][i] & 0x3) << (i * 2 + 8));
  191. }
  192. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008);
  193. odm_set_bb_reg(p_dm_odm, 0x1bf0, 0x00ffffff, tmp1 | tmp2 | tmp3);
  194. for (i = 0; i < SS_8821C; i++)
  195. odm_write_4byte(p_dm_odm, 0x1be8 + (i * 4), (p_iqk_info->RXIQK_AGC[channel][(i * 2) + 1] << 16) | p_iqk_info->RXIQK_AGC[channel][i * 2]);
  196. }
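/*
 * Layout of the IQK report packed into 0x1bf0[23:0] above:
 *   bits [3:0]   per-path TX IQK fail flags
 *   bits [7:4]   per-path RX IQK fail flags
 *   bits [23:8]  2-bit RX IQK fail code per path
 * 0x1be8 + 4*i additionally carries the RX IQK AGC words, two 16-bit entries
 * per register.
 */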
  197. void
  198. _iqk_iqk_fail_report_8821c(
  199. struct PHY_DM_STRUCT *p_dm_odm
  200. )
  201. {
  202. u32 tmp1bf0 = 0x0;
  203. u8 i;
  204. tmp1bf0 = odm_read_4byte(p_dm_odm, 0x1bf0);
  205. for (i = 0; i < 4; i++) {
  206. if (tmp1bf0 & (0x1 << i))
  207. #if !(DM_ODM_SUPPORT_TYPE & ODM_AP)
  208. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK] please check S%d TXIQK\n", i));
  209. #else
  210. panic_printk("[IQK] please check S%d TXIQK\n", i);
  211. #endif
  212. if (tmp1bf0 & (0x1 << (i + 12)))
  213. #if !(DM_ODM_SUPPORT_TYPE & ODM_AP)
  214. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK] please check S%d RXIQK\n", i));
  215. #else
  216. panic_printk("[IQK] please check S%d RXIQK\n", i);
  217. #endif
  218. }
  219. }
  220. void
  221. _iqk_backup_mac_bb_8821c(
  222. struct PHY_DM_STRUCT *p_dm_odm,
  223. u32 *MAC_backup,
  224. u32 *BB_backup,
  225. u32 *backup_mac_reg,
  226. u32 *backup_bb_reg,
  227. u8 num_backup_bb_reg
  228. )
  229. {
  230. u32 i;
  231. for (i = 0; i < MAC_REG_NUM_8821C; i++)
  232. MAC_backup[i] = odm_read_4byte(p_dm_odm, backup_mac_reg[i]);
  233. for (i = 0; i < num_backup_bb_reg; i++)
  234. BB_backup[i] = odm_read_4byte(p_dm_odm, backup_bb_reg[i]);
  235. /* ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]BackupMacBB Success!!!!\n")); */
  236. }
  237. void
  238. _iqk_backup_rf_8821c(
  239. struct PHY_DM_STRUCT *p_dm_odm,
  240. u32 RF_backup[][SS_8821C],
  241. u32 *backup_rf_reg
  242. )
  243. {
  244. u32 i, j;
  245. for (i = 0; i < RF_REG_NUM_8821C; i++)
  246. for (j = 0; j < SS_8821C; j++)
  247. RF_backup[i][j] = odm_get_rf_reg(p_dm_odm, j, backup_rf_reg[i], RFREGOFFSETMASK);
  248. /* ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]BackupRF Success!!!!\n")); */
  249. }
  250. void
  251. _iqk_agc_bnd_int_8821c(
  252. struct PHY_DM_STRUCT *p_dm_odm
  253. )
  254. {
  255. /*initialize RX AGC boundary; this must be done after BB reset*/
  256. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008);
  257. odm_write_4byte(p_dm_odm, 0x1b00, 0xf80a7008);
  258. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8015008);
  259. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008);
  260. /*ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE, ("[IQK]init. rx agc bnd\n"));*/
  261. }
  262. void
  263. _iqk_bb_reset_8821c(
  264. struct PHY_DM_STRUCT *p_dm_odm
  265. )
  266. {
  267. boolean cca_ing = false;
  268. u32 count = 0;
  269. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x0, RFREGOFFSETMASK, 0x10000);
  270. while (1) {
  271. odm_write_4byte(p_dm_odm, 0x8fc, 0x0);
  272. odm_set_bb_reg(p_dm_odm, 0x198c, 0x7, 0x7);
  273. cca_ing = (boolean) odm_get_bb_reg(p_dm_odm, 0xfa0, BIT(3));
  274. if (count > 30)
  275. cca_ing = false;
  276. if (cca_ing) {
  277. ODM_delay_ms(1);
  278. count++;
  279. } else {
  280. odm_write_1byte(p_dm_odm, 0x808, 0x0); /*RX ant off*/
  281. odm_set_bb_reg(p_dm_odm, 0xa04, BIT(27) | BIT(26) | BIT(25) | BIT(24), 0x0); /*CCK RX path off*/
  282. /*BBreset*/
  283. odm_set_bb_reg(p_dm_odm, 0x0, BIT(16), 0x0);
  284. odm_set_bb_reg(p_dm_odm, 0x0, BIT(16), 0x1);
  285. if (odm_get_bb_reg(p_dm_odm, 0x660, BIT(16)))
  286. odm_write_4byte(p_dm_odm, 0x6b4, 0x89000006);
  287. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]BBreset!!!!\n"));
  288. break;
  289. }
  290. }
  291. }
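/*
 * BB reset sequence as implemented above: park RF path A (RF0x0 = 0x10000),
 * wait until CCA is idle (0xfa0[3] == 0, checked for at most ~30 ms), switch
 * the RX antenna and CCK RX paths off, then toggle the BB reset bit 0x0[16].
 * 0x6b4 is rewritten only when 0x660[16] is set.
 */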
  292. void
  293. _iqk_afe_setting_8821c(
  294. struct PHY_DM_STRUCT *p_dm_odm,
  295. boolean do_iqk
  296. )
  297. {
  298. if (do_iqk) {
  299. /*IQK AFE setting RX_WAIT_CCA mode */
  300. odm_write_4byte(p_dm_odm, 0xc60, 0x50000000);
  301. odm_write_4byte(p_dm_odm, 0xc60, 0x700F0040);
  302. /*AFE setting*/
  303. odm_write_4byte(p_dm_odm, 0xc58, 0xd8000402);
  304. odm_write_4byte(p_dm_odm, 0xc5c, 0xd1000120);
  305. odm_write_4byte(p_dm_odm, 0xc6c, 0x00000a15);
  306. _iqk_bb_reset_8821c(p_dm_odm);
  307. /* ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]AFE setting for IQK mode!!!!\n")); */
  308. } else {
  309. /*IQK AFE setting RX_WAIT_CCA mode */
  310. odm_write_4byte(p_dm_odm, 0xc60, 0x50000000);
  311. odm_write_4byte(p_dm_odm, 0xc60, 0x700B8040);
  312. /*AFE setting*/
  313. odm_write_4byte(p_dm_odm, 0xc58, 0xd8020402);
  314. odm_write_4byte(p_dm_odm, 0xc5c, 0xde000120);
  315. odm_write_4byte(p_dm_odm, 0xc6c, 0x0000122a);
  316. /* ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]AFE setting for Normal mode!!!!\n")); */
  317. }
  318. }
  319. void
  320. _iqk_restore_mac_bb_8821c(
  321. struct PHY_DM_STRUCT *p_dm_odm,
  322. u32 *MAC_backup,
  323. u32 *BB_backup,
  324. u32 *backup_mac_reg,
  325. u32 *backup_bb_reg,
  326. u8 num_backup_bb_reg
  327. )
  328. {
  329. u32 i;
  330. for (i = 0; i < MAC_REG_NUM_8821C; i++)
  331. odm_write_4byte(p_dm_odm, backup_mac_reg[i], MAC_backup[i]);
  332. for (i = 0; i < num_backup_bb_reg; i++)
  333. odm_write_4byte(p_dm_odm, backup_bb_reg[i], BB_backup[i]);
  334. /* ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]RestoreMacBB Success!!!!\n")); */
  335. }
  336. void
  337. _iqk_restore_rf_8821c(
  338. struct PHY_DM_STRUCT *p_dm_odm,
  339. u32 *backup_rf_reg,
  340. u32 RF_backup[][SS_8821C]
  341. )
  342. {
  343. u32 i;
  344. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xef, RFREGOFFSETMASK, 0x0);
  345. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xee, RFREGOFFSETMASK, 0x0);
  346. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xdf, RFREGOFFSETMASK, RF_backup[0][ODM_RF_PATH_A] & (~BIT(4)));
  347. /*odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xde, RFREGOFFSETMASK, RF_backup[1][ODM_RF_PATH_A]|BIT4);*/
  348. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xde, RFREGOFFSETMASK, RF_backup[1][ODM_RF_PATH_A] & (~BIT(4)));
  349. for (i = 2; i < (RF_REG_NUM_8821C-1); i++)
  350. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, backup_rf_reg[i], RFREGOFFSETMASK, RF_backup[i][ODM_RF_PATH_A]);
  351. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x1, RFREGOFFSETMASK, (RF_backup[5][ODM_RF_PATH_A] & (~BIT(0))));
  352. /*ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]RestoreRF Success!!!!\n")); */
  353. }
  354. void
  355. _iqk_backup_iqk_8821c(
  356. struct PHY_DM_STRUCT *p_dm_odm,
  357. u8 step
  358. )
  359. {
  360. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  361. u8 i, j, k, path, idx;
  362. u32 tmp;
  363. u16 iqk_apply[2] = {0xc94, 0xe94};
  364. if (step == 0x0) {
  365. p_iqk_info->iqk_channel[1] = p_iqk_info->iqk_channel[0];
  366. for (i = 0; i < SS_8821C; i++) {
  367. p_iqk_info->LOK_IDAC[1][i] = p_iqk_info->LOK_IDAC[0][i];
  368. p_iqk_info->RXIQK_AGC[1][i] = p_iqk_info->RXIQK_AGC[0][i];
  369. p_iqk_info->bypass_iqk[1][i] = p_iqk_info->bypass_iqk[0][i];
  370. p_iqk_info->RXIQK_fail_code[1][i] = p_iqk_info->RXIQK_fail_code[0][i];
  371. for (j = 0; j < 2; j++) {
  372. p_iqk_info->IQK_fail_report[1][i][j] = p_iqk_info->IQK_fail_report[0][i][j];
  373. for (k = 0; k < 8; k++) {
  374. p_iqk_info->IQK_CFIR_real[1][i][j][k] = p_iqk_info->IQK_CFIR_real[0][i][j][k];
  375. p_iqk_info->IQK_CFIR_imag[1][i][j][k] = p_iqk_info->IQK_CFIR_imag[0][i][j][k];
  376. }
  377. }
  378. }
  379. for (i = 0; i < 4; i++) {
  380. p_iqk_info->RXIQK_fail_code[0][i] = 0x0;
  381. p_iqk_info->RXIQK_AGC[0][i] = 0x0;
  382. for (j = 0; j < 2; j++) {
  383. p_iqk_info->IQK_fail_report[0][i][j] = true;
  384. p_iqk_info->gs_retry_count[0][i][j] = 0x0;
  385. }
  386. for (j = 0; j < 3; j++)
  387. p_iqk_info->retry_count[0][i][j] = 0x0;
  388. }
  389. } else {
  390. p_iqk_info->iqk_channel[0] = p_iqk_info->rf_reg18;
  391. for (path = 0; path < SS_8821C; path++) {
  392. p_iqk_info->LOK_IDAC[0][path] = odm_get_rf_reg(p_dm_odm, path, 0x58, RFREGOFFSETMASK);
  393. p_iqk_info->bypass_iqk[0][path] = odm_get_bb_reg(p_dm_odm, iqk_apply[path], MASKDWORD);
  394. for (idx = 0; idx < 2; idx++) {
  395. odm_set_bb_reg(p_dm_odm, 0x1b00, MASKDWORD, 0xf8000008 | path << 1);
  396. if (idx == 0)
  397. odm_set_bb_reg(p_dm_odm, 0x1b0c, BIT(13) | BIT(12), 0x3);
  398. else
  399. odm_set_bb_reg(p_dm_odm, 0x1b0c, BIT(13) | BIT(12), 0x1);
  400. odm_set_bb_reg(p_dm_odm, 0x1bd4, BIT(20) | BIT(19) | BIT(18) | BIT(17) | BIT(16), 0x10);
  401. for (i = 0; i < 8; i++) {
  402. odm_set_bb_reg(p_dm_odm, 0x1bd8, MASKDWORD, 0xe0000001 + (i * 4));
  403. tmp = odm_get_bb_reg(p_dm_odm, 0x1bfc, MASKDWORD);
  404. p_iqk_info->IQK_CFIR_real[0][path][idx][i] = (tmp & 0x0fff0000) >> 16;
  405. p_iqk_info->IQK_CFIR_imag[0][path][idx][i] = tmp & 0xfff;
  406. }
  407. }
  408. odm_set_bb_reg(p_dm_odm, 0x1bd8, MASKDWORD, 0x0);
  409. odm_set_bb_reg(p_dm_odm, 0x1b0c, BIT(13) | BIT(12), 0x0);
  410. }
  411. }
  412. }
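/*
 * IQK result bookkeeping (as implemented above): index [0] holds the most
 * recent channel, index [1] the previous one. step == 0 shifts slot 0 into
 * slot 1 and clears slot 0; otherwise the routine captures the LOK IDAC
 * (RF 0x58), the apply registers (0xc94/0xe94) and the eight CFIR taps per
 * direction, read back through 0x1bd8 (tap select) / 0x1bfc (tap value),
 * with 0x1b0c[13:12] selecting the TX (0x3) or RX (0x1) filter bank.
 */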
  413. void
  414. _iqk_reload_iqk_setting_8821c(
  415. struct PHY_DM_STRUCT *p_dm_odm,
  416. u8 channel,
  417. u8 reload_idx /*1: reload TX, 2: reload LO, TX, RX*/
  418. )
  419. {
  420. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  421. u8 i, path, idx;
  422. u16 iqk_apply[2] = {0xc94, 0xe94};
  423. for (path = 0; path < SS_8821C; path++) {
  424. #if 0
  425. if (reload_idx == 2) {
  426. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0xdf, BIT(4), 0x1);
  427. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x58, RFREGOFFSETMASK, p_iqk_info->LOK_IDAC[channel][path]);
  428. }
  429. #endif
  430. for (idx = 0; idx < reload_idx; idx++) {
  431. odm_set_bb_reg(p_dm_odm, 0x1b00, MASKDWORD, 0xf8000008 | path << 1);
  432. odm_set_bb_reg(p_dm_odm, 0x1b2c, MASKDWORD, 0x7);
  433. odm_set_bb_reg(p_dm_odm, 0x1b38, MASKDWORD, 0x20000000);
  434. odm_set_bb_reg(p_dm_odm, 0x1b3c, MASKDWORD, 0x20000000);
  435. odm_set_bb_reg(p_dm_odm, 0x1bcc, MASKDWORD, 0x00000000);
  436. if (idx == 0)
  437. odm_set_bb_reg(p_dm_odm, 0x1b0c, BIT(13) | BIT(12), 0x3);
  438. else
  439. odm_set_bb_reg(p_dm_odm, 0x1b0c, BIT(13) | BIT(12), 0x1);
  440. odm_set_bb_reg(p_dm_odm, 0x1bd4, BIT(20) | BIT(19) | BIT(18) | BIT(17) | BIT(16), 0x10);
  441. for (i = 0; i < 8; i++) {
  442. odm_write_4byte(p_dm_odm, 0x1bd8, ((0xc0000000 >> idx) + 0x3) + (i * 4) + (p_iqk_info->IQK_CFIR_real[channel][path][idx][i] << 9));
  443. odm_write_4byte(p_dm_odm, 0x1bd8, ((0xc0000000 >> idx) + 0x1) + (i * 4) + (p_iqk_info->IQK_CFIR_imag[channel][path][idx][i] << 9));
  444. }
  445. }
  446. odm_set_bb_reg(p_dm_odm, iqk_apply[path], MASKDWORD, p_iqk_info->bypass_iqk[channel][path]);
  447. odm_set_bb_reg(p_dm_odm, 0x1bd8, MASKDWORD, 0x0);
  448. odm_set_bb_reg(p_dm_odm, 0x1b0c, BIT(13) | BIT(12), 0x0);
  449. }
  450. }
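/*
 * CFIR restore format used above: each 0x1bd8 write selects the bank via
 * (0xc0000000 >> idx), the tap via (i * 4), and carries the stored 12-bit
 * coefficient in bits [20:9]; the "+0x3" word reloads the real part and the
 * "+0x1" word the imaginary part. Writing 0x0 to 0x1bd8 and clearing
 * 0x1b0c[13:12] closes the access window again.
 */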
  451. boolean
  452. _iqk_reload_iqk_8821c(
  453. struct PHY_DM_STRUCT *p_dm_odm,
  454. boolean reset
  455. )
  456. {
  457. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  458. u8 i;
  459. boolean reload = false;
  460. if (reset) {
  461. for (i = 0; i < 2; i++)
  462. p_iqk_info->iqk_channel[i] = 0x0;
  463. } else {
  464. p_iqk_info->rf_reg18 = odm_get_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x18, RFREGOFFSETMASK);
  465. for (i = 0; i < 2; i++) {
  466. if (p_iqk_info->rf_reg18 == p_iqk_info->iqk_channel[i]) {
  467. _iqk_reload_iqk_setting_8821c(p_dm_odm, i, 2);
  468. _iqk_fill_iqk_report_8821c(p_dm_odm, i);
  469. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]reload IQK result before!!!!\n"));
  470. reload = true;
  471. }
  472. }
  473. }
  474. return reload;
  475. }
  476. void
  477. _iqk_rfe_setting_8821c(
  478. struct PHY_DM_STRUCT *p_dm_odm,
  479. boolean ext_pa_on
  480. )
  481. {
  482. if (ext_pa_on) {
  483. /*RFE setting*/
  484. odm_write_4byte(p_dm_odm, 0xcb0, 0x77777777);
  485. odm_write_4byte(p_dm_odm, 0xcb4, 0x00007777);
  486. odm_write_4byte(p_dm_odm, 0xcbc, 0x0000083B);
  487. /*odm_write_4byte(p_dm_odm, 0x1990, 0x00000c30);*/
  488. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]external PA on!!!!\n"));
  489. } else {
  490. /*RFE setting*/
  491. odm_write_4byte(p_dm_odm, 0xcb0, 0x77171117);
  492. odm_write_4byte(p_dm_odm, 0xcb4, 0x00001177);
  493. odm_write_4byte(p_dm_odm, 0xcbc, 0x00000404);
  494. /*odm_write_4byte(p_dm_odm, 0x1990, 0x00000c30);*/
  495. /* ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]external PA off!!!!\n"));*/
  496. }
  497. }
  498. void
  499. _iqk_rfsetting_8821c(
  500. struct PHY_DM_STRUCT *p_dm_odm
  501. )
  502. {
  503. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  504. u8 path;
  505. u32 tmp;
  506. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008);
  507. odm_write_4byte(p_dm_odm, 0x1bb8, 0x00000000);
  508. for (path = 0; path < SS_8821C; path++) {
  509. /*0xdf: bit11 = 1, bit4 = 0, bit1 = 1*/
  510. tmp = odm_get_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0xdf, RFREGOFFSETMASK);
  511. tmp = (tmp & (~BIT(4))) | BIT(1) | BIT(11);
  512. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0xdf, RFREGOFFSETMASK, tmp);
  513. if (p_iqk_info->is_BTG) {
  514. tmp = odm_get_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xde, RFREGOFFSETMASK);
  515. tmp = (tmp & (~BIT(4))) | BIT(15);
  516. /*tmp = tmp|BIT4|BIT15; //manual LOK value for A-cut*/
  517. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xde, RFREGOFFSETMASK, tmp);
  518. }
  519. if (!p_iqk_info->is_BTG) {
  520. /*WLAN_AG*/
  521. /*TX IQK mode init*/
  522. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0xef, RFREGOFFSETMASK, 0x80000);
  523. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x33, RFREGOFFSETMASK, 0x00024);
  524. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x3e, RFREGOFFSETMASK, 0x0003f);
  525. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x3f, RFREGOFFSETMASK, 0x60fde);
  526. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0xef, RFREGOFFSETMASK, 0x00000);
  527. if (*p_dm_odm->p_band_type == ODM_BAND_5G) {
  528. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0xef, BIT(19), 0x1);
  529. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x33, RFREGOFFSETMASK, 0x00026);
  530. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x3e, RFREGOFFSETMASK, 0x00037);
  531. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x3f, RFREGOFFSETMASK, 0xdefce);
  532. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0xef, BIT(19), 0x0);
  533. } else {
  534. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0xef, BIT(19), 0x1);
  535. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x33, RFREGOFFSETMASK, 0x00026);
  536. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x3e, RFREGOFFSETMASK, 0x00037);
  537. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x3f, RFREGOFFSETMASK, 0x5efce);
  538. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0xef, BIT(19), 0x0);
  539. }
  540. } else {
  541. /*WLAN_BTG*/
  542. /*TX IQK mode init*/
  543. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0xee, RFREGOFFSETMASK, 0x01000);
  544. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x33, RFREGOFFSETMASK, 0x00004);
  545. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x3f, RFREGOFFSETMASK, 0x01ec1);
  546. odm_set_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0xee, RFREGOFFSETMASK, 0x00000);
  547. }
  548. }
  549. }
  550. void
  551. _iqk_configure_macbb_8821c(
  552. struct PHY_DM_STRUCT *p_dm_odm
  553. )
  554. {
  555. /*MACBB register setting*/
  556. odm_write_1byte(p_dm_odm, 0x522, 0x7f);
  557. odm_set_bb_reg(p_dm_odm, 0x1518, BIT(16), 0x1);
  558. odm_set_bb_reg(p_dm_odm, 0x550, BIT(11) | BIT(3), 0x0);
  559. odm_set_bb_reg(p_dm_odm, 0x90c, BIT(15), 0x1); /*0x90c[15]=1: dac_buf reset selection*/
  560. odm_set_bb_reg(p_dm_odm, 0x9a4, BIT(31), 0x0); /*0x9a4[31]=0: Select da clock*/
  561. /*0xc94[0]=1, 0xe94[0]=1: let TX be generated from the IQK path*/
  562. odm_set_bb_reg(p_dm_odm, 0xc94, BIT(0), 0x1);
  563. /* 3-wire off*/
  564. odm_write_4byte(p_dm_odm, 0xc00, 0x00000004);
  565. /*disable PMAC*/
  566. odm_set_bb_reg(p_dm_odm, 0xb00, BIT(8), 0x0);
  567. /* ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]Set MACBB setting for IQK!!!!\n"));*/
  568. }
  569. void
  570. _iqk_lok_setting_8821c(
  571. struct PHY_DM_STRUCT *p_dm_odm,
  572. u8 path,
  573. u8 uPADindex
  574. )
  575. {
  576. u32 LOK0x56_2G = 0x50ef3;
  577. u32 LOK0x56_5G = 0x50ee8;
  578. u32 LOK0x33 = 0;
  579. u32 LOK0x78 = 0xbcbba;
  580. u32 tmp = 0;
  581. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  582. LOK0x33 = uPADindex;
  583. /*add a delay for MAC packet transmission (MP mode only)*/
  584. if (p_dm_odm->mp_mode)
  585. odm_set_bb_reg(p_dm_odm, 0x810, BIT(7)|BIT(6)|BIT(5)|BIT(4), 0x8);
  586. if (p_iqk_info->is_BTG) {
  587. tmp = (LOK0x78 & 0x1c000) >> 14;
  588. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  589. odm_write_4byte(p_dm_odm, 0x1bcc, 0x1b);
  590. odm_write_1byte(p_dm_odm, 0x1b23, 0x00);
  591. odm_write_1byte(p_dm_odm, 0x1b2b, 0x80);
  592. /*0x78[11:0] = IDAC value*/
  593. LOK0x78 = LOK0x78 & (0xe3fff | ((u32)uPADindex << 14));
  594. odm_set_rf_reg(p_dm_odm, path, 0x78, RFREGOFFSETMASK, LOK0x78);
  595. odm_set_rf_reg(p_dm_odm, path, 0x5c, RFREGOFFSETMASK, 0x05320);
  596. odm_set_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK, 0xac018);
  597. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xee, BIT(4), 0x1);
  598. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x33, BIT(3), 0x0);
  599. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE, ("[IQK] In the BTG\n"));
  600. } else {
  601. /*tmp = (LOK0x56 & 0xe0) >> 5;*/
  602. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  603. odm_write_4byte(p_dm_odm, 0x1bcc, 0x9);
  604. odm_write_1byte(p_dm_odm, 0x1b23, 0x00);
  605. switch (*p_dm_odm->p_band_type) {
  606. case ODM_BAND_2_4G:
  607. odm_write_1byte(p_dm_odm, 0x1b2b, 0x00);
  608. LOK0x56_2G = LOK0x56_2G & (0xfff1f | ((u32)uPADindex << 5));
  609. odm_set_rf_reg(p_dm_odm, path, 0x56, RFREGOFFSETMASK, LOK0x56_2G);
  610. odm_set_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK, 0xadc18);
  611. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xef, BIT(4), 0x1);
  612. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x33, BIT(3), 0x0);
  613. break;
  614. case ODM_BAND_5G:
  615. odm_write_1byte(p_dm_odm, 0x1b2b, 0x00);
  616. LOK0x56_5G = LOK0x56_5G & (0xfff1f | ((u32)uPADindex << 5));
  617. odm_set_rf_reg(p_dm_odm, path, 0x56, RFREGOFFSETMASK, LOK0x56_5G);
  618. odm_set_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK, 0xadc18);
  619. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xef, BIT(4), 0x1);
  620. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x33, BIT(3), 0x1);
  621. break;
  622. }
  623. }
  624. /*for IDAC LUT by PAD idx*/
  625. odm_set_rf_reg(p_dm_odm, path, 0x33, BIT(2) | BIT(1) | BIT(0), LOK0x33);
  626. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  627. ("[IQK] LOK0x33 = 0x%x, LOK0x56_2G = 0x%x, LOK0x56_5G = 0x%x,LOK0x78 =0x%x\n",
  628. LOK0x33, LOK0x56_2G, LOK0x56_5G, LOK0x78));
  629. /* ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]Set LOK setting!!!!\n"));*/
  630. }
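/*
 * LOK is swept once per PAD gain index (see the i < 8 loop in
 * _iqk_iqk_by_path_8821c below): uPADindex is written into the PAD field of
 * RF 0x56 (WLAN) or RF 0x78 (BTG), and RF 0x33[2:0] selects the matching
 * IDAC LUT entry, so every PAD step gets its own LO leakage correction.
 */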
  631. void
  632. _iqk_txk_setting_8821c(
  633. struct PHY_DM_STRUCT *p_dm_odm,
  634. u8 path
  635. )
  636. {
  637. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  638. if (p_iqk_info->is_BTG) {
  639. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  640. odm_write_4byte(p_dm_odm, 0x1bcc, 0x1b);
  641. odm_write_4byte(p_dm_odm, 0x1b20, 0x00840008);
  642. /*0x78[11:0] = IDAC value*/
  643. odm_set_rf_reg(p_dm_odm, path, 0x78, RFREGOFFSETMASK, 0xbcbba);
  644. odm_set_rf_reg(p_dm_odm, path, 0x5c, RFREGOFFSETMASK, 0x04320);
  645. odm_set_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK, 0xac018);
  646. odm_write_1byte(p_dm_odm, 0x1b2b, 0x80);
  647. } else {
  648. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  649. odm_write_4byte(p_dm_odm, 0x1bcc, 0x9);
  650. odm_write_4byte(p_dm_odm, 0x1b20, 0x01440008);
  651. switch (*p_dm_odm->p_band_type) {
  652. case ODM_BAND_2_4G:
  653. odm_set_rf_reg(p_dm_odm, path, 0x56, RFREGOFFSETMASK, 0x50EF3);
  654. odm_set_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK, 0xadc18);
  655. odm_write_1byte(p_dm_odm, 0x1b2b, 0x00);
  656. break;
  657. case ODM_BAND_5G:
  658. odm_set_rf_reg(p_dm_odm, path, 0x56, RFREGOFFSETMASK, 0x50EF0);
  659. odm_set_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK, 0xa9c18);
  660. odm_write_1byte(p_dm_odm, 0x1b2b, 0x00);
  661. break;
  662. }
  663. }
  664. /* ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]Set TXK setting!!!!\n"));*/
  665. }
  666. void
  667. _iqk_rxk1setting_8821c(
  668. struct PHY_DM_STRUCT *p_dm_odm,
  669. u8 path
  670. )
  671. {
  672. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  673. if (p_iqk_info->is_BTG) {
  674. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  675. odm_write_1byte(p_dm_odm, 0x1b2b, 0x80);
  676. odm_write_4byte(p_dm_odm, 0x1bcc, 0x09);
  677. odm_write_4byte(p_dm_odm, 0x1b20, 0x01450008);
  678. odm_write_4byte(p_dm_odm, 0x1b24, 0x01460c88);
  679. /*0x78[11:0] = IDAC value*/
  680. odm_set_rf_reg(p_dm_odm, path, 0x78, RFREGOFFSETMASK, 0x8cbba);
  681. odm_set_rf_reg(p_dm_odm, path, 0x5c, RFREGOFFSETMASK, 0x00320);
  682. odm_set_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK, 0xa8018);
  683. } else {
  684. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  685. switch (*p_dm_odm->p_band_type) {
  686. case ODM_BAND_2_4G:
  687. odm_write_1byte(p_dm_odm, 0x1bcc, 0x12);
  688. odm_write_1byte(p_dm_odm, 0x1b2b, 0x00);
  689. odm_write_4byte(p_dm_odm, 0x1b20, 0x01450008);
  690. odm_write_4byte(p_dm_odm, 0x1b24, 0x01461068);
  691. odm_set_rf_reg(p_dm_odm, path, 0x56, RFREGOFFSETMASK, 0x510f3);
  692. odm_set_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK, 0xa9c00);
  693. break;
  694. case ODM_BAND_5G:
  695. odm_write_1byte(p_dm_odm, 0x1bcc, 0x9);
  696. odm_write_1byte(p_dm_odm, 0x1b2b, 0x00);
  697. odm_write_4byte(p_dm_odm, 0x1b20, 0x00450008);
  698. odm_write_4byte(p_dm_odm, 0x1b24, 0x00461468);
  699. odm_set_rf_reg(p_dm_odm, path, 0x56, RFREGOFFSETMASK, 0x510f3);
  700. odm_set_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK, 0xa9c00);
  701. break;
  702. }
  703. }
  704. /*ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]Set RXK setting!!!!\n"));*/
  705. }
  706. static u8 btg_lna[5] = {0x0, 0x4, 0x8, 0xc, 0xf};
  707. static u8 wlg_lna[5] = {0x0, 0x1, 0x2, 0x3, 0x5};
  708. static u8 wla_lna[5] = {0x0, 0x1, 0x3, 0x4, 0x5};
  709. void
  710. _iqk_rxk2setting_8821c(
  711. struct PHY_DM_STRUCT *p_dm_odm,
  712. u8 path,
  713. boolean is_gs
  714. )
  715. {
  716. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  717. if (p_iqk_info->is_BTG) {
  718. if (is_gs) {
  719. p_iqk_info->tmp1bcc = 0x1b;
  720. p_iqk_info->lna_idx = 2;
  721. }
  722. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  723. odm_write_1byte(p_dm_odm, 0x1b2b, 0x80);
  724. odm_write_4byte(p_dm_odm, 0x1bcc, p_iqk_info->tmp1bcc);
  725. odm_write_4byte(p_dm_odm, 0x1b20, 0x01450008);
  726. odm_write_4byte(p_dm_odm, 0x1b24, (0x01460048 | (btg_lna[p_iqk_info->lna_idx] << 10)));
  727. /*0x78[11:0] = IDAC value*/
  728. odm_set_rf_reg(p_dm_odm, path, 0x78, RFREGOFFSETMASK, 0x8cbba);
  729. odm_set_rf_reg(p_dm_odm, path, 0x5c, RFREGOFFSETMASK, 0x00320);
  730. odm_set_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK, 0xa8018);
  731. } else {
  732. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  733. switch (*p_dm_odm->p_band_type) {
  734. case ODM_BAND_2_4G:
  735. if (is_gs) {
  736. p_iqk_info->tmp1bcc = 0x12;
  737. p_iqk_info->lna_idx = 2;
  738. }
  739. odm_write_1byte(p_dm_odm, 0x1bcc, p_iqk_info->tmp1bcc);
  740. odm_write_1byte(p_dm_odm, 0x1b2b, 0x00);
  741. odm_write_4byte(p_dm_odm, 0x1b20, 0x01450008);
  742. odm_write_4byte(p_dm_odm, 0x1b24, (0x01460048 | (wlg_lna[p_iqk_info->lna_idx] << 10)));
  743. odm_set_rf_reg(p_dm_odm, path, 0x56, RFREGOFFSETMASK, 0x510f3);
  744. odm_set_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK, 0xa9c00);
  745. break;
  746. case ODM_BAND_5G:
  747. if (is_gs) {
  748. p_iqk_info->tmp1bcc = 0x12;
  749. p_iqk_info->lna_idx = 2;
  750. }
  751. odm_write_1byte(p_dm_odm, 0x1bcc, p_iqk_info->tmp1bcc);
  752. odm_write_1byte(p_dm_odm, 0x1b2b, 0x00);
  753. odm_write_4byte(p_dm_odm, 0x1b20, 0x00450008);
  754. odm_write_4byte(p_dm_odm, 0x1b24, (0x01460048 | (wla_lna[p_iqk_info->lna_idx] << 10)));
  755. odm_set_rf_reg(p_dm_odm, path, 0x56, RFREGOFFSETMASK, 0x51000);
  756. odm_set_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK, 0xa9c00);
  757. break;
  758. }
  759. }
  760. /* ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]Set RXK setting!!!!\n"));*/
  761. }
  762. boolean
  763. _iqk_check_cal_8821c(
  764. struct PHY_DM_STRUCT *p_dm_odm,
  765. u32 IQK_CMD
  766. )
  767. {
  768. boolean notready = true, fail = true;
  769. u32 delay_count = 0x0;
  770. while (notready) {
  771. if (odm_read_4byte(p_dm_odm, 0x1b00) == (IQK_CMD & 0xffffff0f)) {
  772. fail = (boolean) odm_get_bb_reg(p_dm_odm, 0x1b08, BIT(26));
  773. notready = false;
  774. } else {
  775. ODM_delay_ms(1);
  776. delay_count++;
  777. }
  778. if (delay_count >= 50) {
  779. fail = true;
  780. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  781. ("[IQK]IQK timeout!!!\n"));
  782. break;
  783. }
  784. }
  785. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  786. ("[IQK]delay count = 0x%x!!!\n", delay_count));
  787. return fail;
  788. }
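/*
 * One-shot completion handshake: after 0x1b00 is written with IQK_CMD and
 * IQK_CMD + 1, the engine clears the trigger nibble, so polling until 0x1b00
 * reads back as (IQK_CMD & 0xffffff0f) signals completion; the per-command
 * fail flag is then taken from 0x1b08[26]. The poll gives up after ~50 ms.
 */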
  789. boolean
  790. _iqk_rx_iqk_gain_search_fail_8821c(
  791. struct PHY_DM_STRUCT *p_dm_odm,
  792. u8 path,
  793. u8 step
  794. )
  795. {
  796. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  797. boolean fail = true;
  798. u32 IQK_CMD = 0x0, rf_reg0, tmp, rxbb;
  799. u8 IQMUX[4] = {0x9, 0x12, 0x1b, 0x24}, *plna;
  800. u8 idx;
  801. u8 lna_setting[5];
  802. if (p_iqk_info->is_BTG)
  803. plna = btg_lna;
  804. else if (*p_dm_odm->p_band_type == ODM_BAND_2_4G)
  805. plna = wlg_lna;
  806. else
  807. plna = wla_lna;
  808. for (idx = 0; idx < 4; idx++)
  809. if (p_iqk_info->tmp1bcc == IQMUX[idx])
  810. break;
  811. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  812. odm_write_4byte(p_dm_odm, 0x1bcc, p_iqk_info->tmp1bcc);
  813. if (step == RXIQK1)
  814. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]============ S%d RXIQK GainSearch ============\n", p_iqk_info->is_BTG));
  815. if (step == RXIQK1)
  816. IQK_CMD = 0xf8000208 | (1 << (path + 4));
  817. else
  818. IQK_CMD = 0xf8000308 | (1 << (path + 4));
  819. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE, ("[IQK]S%d GS%d_Trigger = 0x%x\n", path, step, IQK_CMD));
  820. _iqk_set_gnt_wl_gnt_bt(p_dm_odm, true);
  821. odm_write_4byte(p_dm_odm, 0x1b00, IQK_CMD);
  822. odm_write_4byte(p_dm_odm, 0x1b00, IQK_CMD + 0x1);
  823. ODM_delay_ms(GS_delay_8821C);
  824. fail = _iqk_check_cal_8821c(p_dm_odm, IQK_CMD);
  825. RT_TRACE(COMP_COEX, DBG_LOUD, ("[IQK]check 0x49c = %x\n", odm_read_1byte(p_dm_odm, 0x49c)));
  826. _iqk_set_gnt_wl_gnt_bt(p_dm_odm, false);
  827. if (step == RXIQK2) {
  828. rf_reg0 = odm_get_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x0, RFREGOFFSETMASK);
  829. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  830. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  831. ("[IQK]S%d ==> RF0x0 = 0x%x, tmp1bcc = 0x%x, idx = %d, 0x1b3c = 0x%x\n", path, rf_reg0, p_iqk_info->tmp1bcc, idx, odm_read_4byte(p_dm_odm, 0x1b3c)));
  832. tmp = (rf_reg0 & 0x1fe0) >> 5;
  833. rxbb = tmp & 0x1f;
  834. #if 1
  835. if (rxbb == 0x1) {
  836. if (idx != 3)
  837. idx++;
  838. else if (p_iqk_info->lna_idx != 0x0)
  839. p_iqk_info->lna_idx--;
  840. else
  841. p_iqk_info->isbnd = true;
  842. fail = true;
  843. } else if (rxbb == 0xa) {
  844. if (idx != 0)
  845. idx--;
  846. else if (p_iqk_info->lna_idx != 0x4)
  847. p_iqk_info->lna_idx++;
  848. else
  849. p_iqk_info->isbnd = true;
  850. fail = true;
  851. } else
  852. fail = false;
  853. if (p_iqk_info->isbnd == true)
  854. fail = false;
  855. #endif
  856. #if 0
  857. if (rxbb == 0x1) {
  858. if (p_iqk_info->lna_idx != 0x0)
  859. p_iqk_info->lna_idx--;
  860. else if (idx != 3)
  861. idx++;
  862. else
  863. p_iqk_info->isbnd = true;
  864. fail = true;
  865. } else if (rxbb == 0xa) {
  866. if (idx != 0)
  867. idx--;
  868. else if (p_iqk_info->lna_idx != 0x7)
  869. p_iqk_info->lna_idx++;
  870. else
  871. p_iqk_info->isbnd = true;
  872. fail = true;
  873. } else
  874. fail = false;
  875. if (p_iqk_info->isbnd == true)
  876. fail = false;
  877. #endif
  878. p_iqk_info->tmp1bcc = IQMUX[idx];
  879. if (fail) {
  880. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  881. odm_write_4byte(p_dm_odm, 0x1b24, (odm_read_4byte(p_dm_odm, 0x1b24) & 0xffffc3ff) | (*(plna + p_iqk_info->lna_idx) << 10));
  882. }
  883. }
  884. return fail;
  885. }
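/*
 * RX gain search (sketch of the logic above): after the gain-search one-shot,
 * RF 0x0 is read back and bits [12:5] give the rxbb gain code. A value of
 * 0x1 or 0xa means the AGC landed on a boundary, so the routine nudges either
 * the 0x1bcc IQ-mux code (IQMUX table) or the LNA index
 * (btg_lna/wlg_lna/wla_lna tables) and reports fail to force a retry; once
 * both knobs are exhausted, isbnd is set and the current setting is accepted.
 */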
  886. boolean
  887. _lok_one_shot_8821c(
  888. struct PHY_DM_STRUCT *p_dm_void,
  889. u8 path,
  890. u8 uPADindex
  891. )
  892. {
  893. struct PHY_DM_STRUCT *p_dm_odm = (struct PHY_DM_STRUCT *)p_dm_void;
  894. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  895. u8 delay_count = 0, i;
  896. boolean LOK_notready = false;
  897. u32 LOK_temp1 = 0, LOK_temp2 = 0, LOK_temp3 = 0;
  898. u32 IQK_CMD = 0x0;
  899. u8 LOKreg[] = {0x58, 0x78};
  900. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  901. ("[IQK]==========S%d LOK ==========\n", p_iqk_info->is_BTG));
  902. IQK_CMD = 0xf8000008 | (1 << (4 + path));
  903. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE, ("[IQK]LOK_Trigger = 0x%x\n", IQK_CMD));
  904. _iqk_set_gnt_wl_gnt_bt(p_dm_odm, true);
  905. odm_write_4byte(p_dm_odm, 0x1b00, IQK_CMD);
  906. odm_write_4byte(p_dm_odm, 0x1b00, IQK_CMD + 1);
  907. /*LOK: CMD ID = 0 {0xf8000018, 0xf8000028}*/
  908. /*LOK: CMD ID = 0 {0xf8000019, 0xf8000029}*/
  909. ODM_delay_ms(LOK_delay_8821C);
  910. delay_count = 0;
  911. LOK_notready = true;
  912. while (LOK_notready) {
  913. if (odm_read_4byte(p_dm_odm, 0x1b00) == (IQK_CMD & 0xffffff0f))
  914. LOK_notready = false;
  915. else
  916. LOK_notready = true;
  917. if (LOK_notready) {
  918. ODM_delay_ms(1);
  919. delay_count++;
  920. }
  921. if (delay_count >= 50) {
  922. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  923. ("[IQK]S%d LOK timeout!!!\n", path));
  924. break;
  925. }
  926. }
  927. _iqk_set_gnt_wl_gnt_bt(p_dm_odm, false);
  928. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  929. ("[IQK]S%d ==> delay_count = 0x%x\n", path, delay_count));
  930. if (!LOK_notready) {
  931. LOK_temp2 = odm_get_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x8, RFREGOFFSETMASK);
  932. LOK_temp3 = odm_get_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x58, RFREGOFFSETMASK);
  933. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  934. ("[IQK]0x8 = 0x%x, 0x58 = 0x%x\n", LOK_temp2, LOK_temp3));
  935. } else {
  936. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  937. ("[IQK]==>S%d LOK Fail!!!\n", path));
  938. }
  939. p_iqk_info->LOK_fail[path] = LOK_notready;
  940. /*fill IDAC LUT table*/
  941. /*
  942. for (i = 0; i < 8; i++) {
  943. odm_set_rf_reg(p_dm_odm, path, 0x33, BIT(2)|BIT(1)|BIT(0), i);
  944. odm_set_rf_reg(p_dm_odm, path, 0x8, RFREGOFFSETMASK, LOK_temp2);
  945. }
  946. */
  947. return LOK_notready;
  948. }
  949. boolean
  950. _iqk_one_shot_8821c(
  951. void *p_dm_void,
  952. u8 path,
  953. u8 idx
  954. )
  955. {
  956. struct PHY_DM_STRUCT *p_dm_odm = (struct PHY_DM_STRUCT *)p_dm_void;
  957. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  958. u8 delay_count = 0;
  959. boolean notready = true, fail = true, search_fail = true;
  960. u32 IQK_CMD = 0x0, tmp;
  961. u16 iqk_apply[2] = {0xc94, 0xe94};
  962. if (idx == TX_IQK)
  963. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]============ S%d WBTXIQK ============\n", p_iqk_info->is_BTG));
  964. else if (idx == RXIQK1)
  965. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]============ S%d WBRXIQK STEP1============\n", p_iqk_info->is_BTG));
  966. else
  967. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]============ S%d WBRXIQK STEP2============\n", p_iqk_info->is_BTG));
  968. if (idx == TXIQK) {
  969. IQK_CMD = 0xf8000008 | ((*p_dm_odm->p_band_width + 4) << 8) | (1 << (path + 4));
  970. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE, ("[IQK]TXK_Trigger = 0x%x\n", IQK_CMD));
  971. /*{0xf8000418, 0xf800042a} ==> 20 WBTXK (CMD = 4)*/
  972. /*{0xf8000518, 0xf800052a} ==> 40 WBTXK (CMD = 5)*/
  973. /*{0xf8000618, 0xf800062a} ==> 80 WBTXK (CMD = 6)*/
  974. } else if (idx == RXIQK1) {
  975. if (*p_dm_odm->p_band_width == 2)
  976. IQK_CMD = 0xf8000808 | (1 << (path + 4));
  977. else
  978. IQK_CMD = 0xf8000708 | (1 << (path + 4));
  979. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE, ("[IQK]RXK1_Trigger = 0x%x\n", IQK_CMD));
  980. /*{0xf8000718, 0xf800072a} ==> 20 WBRXK (CMD = 7)*/
  981. /*{0xf8000718, 0xf800072a} ==> 40 WBRXK (CMD = 7)*/
  982. /*{0xf8000818, 0xf800082a} ==> 80 WBRXK (CMD = 8)*/
  983. } else if (idx == RXIQK2) {
  984. IQK_CMD = 0xf8000008 | ((*p_dm_odm->p_band_width + 9) << 8) | (1 << (path + 4));
  985. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE, ("[IQK]RXK2_Trigger = 0x%x\n", IQK_CMD));
  986. /*{0xf8000918, 0xf800092a} ==> 20 WBRXK (CMD = 9)*/
  987. /*{0xf8000a18, 0xf8000a2a} ==> 40 WBRXK (CMD = 10)*/
  988. /*{0xf8000b18, 0xf8000b2a} ==> 80 WBRXK (CMD = 11)*/
  989. }
  990. _iqk_set_gnt_wl_gnt_bt(p_dm_odm, true);
  991. odm_write_4byte(p_dm_odm, 0x1bc8, 0x80000000);
  992. odm_write_4byte(p_dm_odm, 0x8f8, 0x41400080);
  993. odm_write_4byte(p_dm_odm, 0x1b00, IQK_CMD);
  994. odm_write_4byte(p_dm_odm, 0x1b00, IQK_CMD + 0x1);
  995. ODM_delay_ms(WBIQK_delay_8821C);
  996. while (notready) {
  997. if (odm_read_4byte(p_dm_odm, 0xfa0) & BIT(27))/*if (odm_read_4byte(p_dm_odm, 0x1b00) == (IQK_CMD & 0xffffff0f))*/
  998. notready = false;
  999. else
  1000. notready = true;
  1001. if (notready) {
  1002. ODM_delay_ms(1);
  1003. delay_count++;
  1004. } else {
  1005. fail = (boolean) odm_get_bb_reg(p_dm_odm, 0x1b08, BIT(26));
  1006. break;
  1007. }
  1008. if (delay_count >= 50) {
  1009. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1010. ("[IQK]S%d IQK timeout!!!\n", path));
  1011. break;
  1012. }
  1013. }
  1014. RT_TRACE(COMP_COEX, DBG_LOUD, ("[IQK]check 0x49c = %x\n", odm_read_1byte(p_dm_odm, 0x49c)));
  1015. _iqk_set_gnt_wl_gnt_bt(p_dm_odm, false);
  1016. if (p_dm_odm->debug_components & ODM_COMP_CALIBRATION) {
  1017. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  1018. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1019. ("[IQK]S%d ==> 0x1b00 = 0x%x, 0x1b08 = 0x%x\n", path, odm_read_4byte(p_dm_odm, 0x1b00), odm_read_4byte(p_dm_odm, 0x1b08)));
  1020. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1021. ("[IQK]S%d ==> delay_count = 0x%x\n", path, delay_count));
  1022. if (idx != TXIQK)
  1023. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1024. ("[IQK]S%d ==> RF0x0 = 0x%x, RF0x%x = 0x%x\n", path,
  1025. odm_get_rf_reg(p_dm_odm, path, 0x0, RFREGOFFSETMASK), (p_iqk_info->is_BTG) ? 0x78 : 0x56,
  1026. (p_iqk_info->is_BTG) ? odm_get_rf_reg(p_dm_odm, path, 0x78, RFREGOFFSETMASK) : odm_get_rf_reg(p_dm_odm, path, 0x56, RFREGOFFSETMASK)));
  1027. }
  1028. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | path << 1);
  1029. if (idx == TXIQK)
  1030. if (fail)
  1031. odm_set_bb_reg(p_dm_odm, iqk_apply[path], BIT(0), 0x0);
  1032. if (idx == RXIQK2) {
  1033. p_iqk_info->RXIQK_AGC[0][path] =
  1034. (u16)(((odm_get_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x0, RFREGOFFSETMASK) >> 5) & 0xff) |
  1035. (p_iqk_info->tmp1bcc << 8));
  1036. odm_write_4byte(p_dm_odm, 0x1b38, 0x20000000);
  1037. if (!fail)
  1038. odm_set_bb_reg(p_dm_odm, iqk_apply[path], (BIT(11) | BIT(10)), 0x1);
  1039. else
  1040. odm_set_bb_reg(p_dm_odm, iqk_apply[path], (BIT(11) | BIT(10)), 0x0);
  1041. }
  1042. if (idx == TXIQK)
  1043. p_iqk_info->IQK_fail_report[0][path][TXIQK] = fail;
  1044. else
  1045. p_iqk_info->IQK_fail_report[0][path][RXIQK] = fail;
  1046. return fail;
  1047. }
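/*
 * IQK_CMD layout used above: 0xf8000008 | (CMD_ID << 8) | (1 << (path + 4)),
 * where CMD_ID depends on bandwidth (TX: 4/5/6, RX step 1: 7/8, RX step 2:
 * 9/10/11, matching the inline comments). Completion is polled via 0xfa0[27].
 * On a TX failure the apply bit 0xc94[0]/0xe94[0] is cleared; for RX step 2
 * the AGC word is recorded and bits [11:10] of the apply register are set
 * only when the calibration passed.
 */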
  1048. boolean
  1049. _iqk_rxiqkbystep_8821c(
  1050. void *p_dm_void,
  1051. u8 path
  1052. )
  1053. {
  1054. struct PHY_DM_STRUCT *p_dm_odm = (struct PHY_DM_STRUCT *)p_dm_void;
  1055. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  1056. boolean KFAIL = true, gonext;
  1057. #if 1
  1058. switch (p_iqk_info->rxiqk_step) {
  1059. case 1: /*gain search_RXK1*/
  1060. _iqk_rxk1setting_8821c(p_dm_odm, path);
  1061. gonext = false;
  1062. while (1) {
  1063. KFAIL = _iqk_rx_iqk_gain_search_fail_8821c(p_dm_odm, path, RXIQK1);
  1064. if (KFAIL && (p_iqk_info->gs_retry_count[0][path][RXIQK1] < 2))
  1065. p_iqk_info->gs_retry_count[0][path][RXIQK1]++;
  1066. else if (KFAIL) {
  1067. p_iqk_info->RXIQK_fail_code[0][path] = 0;
  1068. p_iqk_info->rxiqk_step = 5;
  1069. gonext = true;
  1070. } else {
  1071. p_iqk_info->rxiqk_step++;
  1072. gonext = true;
  1073. }
  1074. if (gonext)
  1075. break;
  1076. }
  1077. break;
  1078. case 2: /*gain search_RXK2*/
  1079. _iqk_rxk2setting_8821c(p_dm_odm, path, true);
  1080. p_iqk_info->isbnd = false;
  1081. while (1) {
  1082. KFAIL = _iqk_rx_iqk_gain_search_fail_8821c(p_dm_odm, path, RXIQK2);
  1083. if (KFAIL && (p_iqk_info->gs_retry_count[0][path][RXIQK2] < rxiqk_gs_limit))
  1084. p_iqk_info->gs_retry_count[0][path][RXIQK2]++;
  1085. else {
  1086. p_iqk_info->rxiqk_step++;
  1087. break;
  1088. }
  1089. }
  1090. break;
  1091. case 3: /*RXK1*/
  1092. _iqk_rxk1setting_8821c(p_dm_odm, path);
  1093. gonext = false;
  1094. while (1) {
  1095. KFAIL = _iqk_one_shot_8821c(p_dm_odm, path, RXIQK1);
  1096. if (KFAIL && (p_iqk_info->retry_count[0][path][RXIQK1] < 2))
  1097. p_iqk_info->retry_count[0][path][RXIQK1]++;
  1098. else if (KFAIL) {
  1099. p_iqk_info->RXIQK_fail_code[0][path] = 1;
  1100. p_iqk_info->rxiqk_step = 5;
  1101. gonext = true;
  1102. } else {
  1103. p_iqk_info->rxiqk_step++;
  1104. gonext = true;
  1105. }
  1106. if (gonext)
  1107. break;
  1108. }
  1109. break;
  1110. case 4: /*RXK2*/
  1111. _iqk_rxk2setting_8821c(p_dm_odm, path, false);
  1112. gonext = false;
  1113. while (1) {
  1114. KFAIL = _iqk_one_shot_8821c(p_dm_odm, path, RXIQK2);
  1115. if (KFAIL && (p_iqk_info->retry_count[0][path][RXIQK2] < 2))
  1116. p_iqk_info->retry_count[0][path][RXIQK2]++;
  1117. else if (KFAIL) {
  1118. p_iqk_info->RXIQK_fail_code[0][path] = 2;
  1119. p_iqk_info->rxiqk_step = 5;
  1120. gonext = true;
  1121. } else {
  1122. p_iqk_info->rxiqk_step++;
  1123. gonext = true;
  1124. }
  1125. if (gonext)
  1126. break;
  1127. }
  1128. break;
  1129. }
  1130. return KFAIL;
  1131. #endif
  1132. }
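/*
 * rxiqk_step state machine: 1 = RXK1 gain search, 2 = RXK2 gain search,
 * 3 = RXK1 one-shot, 4 = RXK2 one-shot, 5 = finished (or aborted once the
 * retry budget is spent). RXIQK_fail_code records where it gave up:
 * 0 = RXK1 gain search, 1 = RXK1, 2 = RXK2.
 */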
  1133. void
  1134. _iqk_iqk_by_path_8821c(
  1135. void *p_dm_void,
  1136. boolean segment_iqk
  1137. )
  1138. {
  1139. struct PHY_DM_STRUCT *p_dm_odm = (struct PHY_DM_STRUCT *)p_dm_void;
  1140. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  1141. boolean KFAIL = true;
  1142. u8 i, kcount_limit;
  1143. u32 cnt_iqk_fail = 0;
  1144. /* ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE, ("[IQK]iqk_step = 0x%x\n", p_dm_odm->rf_calibrate_info.iqk_step)); */
  1145. if (*p_dm_odm->p_band_width == 2)
  1146. kcount_limit = kcount_limit_80m;
  1147. else
  1148. kcount_limit = kcount_limit_others;
  1149. while (1) {
  1150. switch (p_dm_odm->rf_calibrate_info.iqk_step) {
  1151. case 1: /*S0 LOK*/
  1152. for (i = 0; i < 8 ; i++) {/* the LOK Cal in the each PAD stage*/
  1153. _iqk_lok_setting_8821c(p_dm_odm, ODM_RF_PATH_A, i);
  1154. _lok_one_shot_8821c(p_dm_odm, ODM_RF_PATH_A, i);
  1155. }
  1156. p_dm_odm->rf_calibrate_info.iqk_step++;
  1157. break;
  1158. case 2: /*S0 TXIQK*/
  1159. _iqk_txk_setting_8821c(p_dm_odm, ODM_RF_PATH_A);
  1160. KFAIL = _iqk_one_shot_8821c(p_dm_odm, ODM_RF_PATH_A, TXIQK);
  1161. p_iqk_info->kcount++;
  1162. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE, ("[IQK]KFail = 0x%x\n", KFAIL));
  1163. if (KFAIL)
  1164. cnt_iqk_fail++;
  1165. if (KFAIL && (p_iqk_info->retry_count[0][ODM_RF_PATH_A][TXIQK] < 3))
  1166. p_iqk_info->retry_count[0][ODM_RF_PATH_A][TXIQK]++;
  1167. else
  1168. p_dm_odm->rf_calibrate_info.iqk_step++;
  1169. break;
  1170. case 3: /*S0 RXIQK*/
  1171. while (1) {
  1172. KFAIL = _iqk_rxiqkbystep_8821c(p_dm_odm, ODM_RF_PATH_A);
  1173. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE, ("[IQK]S0RXK KFail = 0x%x\n", KFAIL));
  1174. if (p_iqk_info->rxiqk_step == 5) {
  1175. p_dm_odm->rf_calibrate_info.iqk_step++;
  1176. p_iqk_info->rxiqk_step = 1;
  1177. if (KFAIL) {
  1178. cnt_iqk_fail++;
  1179. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1180. ("[IQK]S0RXK fail code: %d!!!\n", p_iqk_info->RXIQK_fail_code[0][ODM_RF_PATH_A]));
  1181. }
  1182. break;
  1183. }
  1184. }
  1185. p_iqk_info->kcount++;
  1186. break;
  1187. }
  1188. if (p_dm_odm->rf_calibrate_info.iqk_step == 4) {
  1189. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1190. ("[IQK]==========LOK summary ==========\n"));
  1191. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1192. ("[IQK]PathA_LOK_notready = %d\n",
  1193. p_iqk_info->LOK_fail[ODM_RF_PATH_A]));
  1194. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1195. ("[IQK]==========IQK summary ==========\n"));
  1196. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1197. ("[IQK]PathA_TXIQK_fail = %d\n",
  1198. p_iqk_info->IQK_fail_report[0][ODM_RF_PATH_A][TXIQK]));
  1199. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1200. ("[IQK]PathA_RXIQK_fail = %d\n",
  1201. p_iqk_info->IQK_fail_report[0][ODM_RF_PATH_A][RXIQK]));
  1202. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1203. ("[IQK]PathA_TXIQK_retry = %d\n",
  1204. p_iqk_info->retry_count[0][ODM_RF_PATH_A][TXIQK]));
  1205. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1206. ("[IQK]PathA_RXK1_retry = %d, PathA_RXK2_retry = %d\n",
  1207. p_iqk_info->retry_count[0][ODM_RF_PATH_A][RXIQK1], p_iqk_info->retry_count[0][ODM_RF_PATH_A][RXIQK2]));
  1208. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1209. ("[IQK]PathA_GS1_retry = %d, PathA_GS2_retry = %d\n",
  1210. p_iqk_info->gs_retry_count[0][ODM_RF_PATH_A][RXIQK1], p_iqk_info->gs_retry_count[0][ODM_RF_PATH_A][RXIQK2]));
  1211. for (i = 0; i < SS_8821C; i++) {
  1212. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008 | i << 1);
  1213. odm_write_4byte(p_dm_odm, 0x1b2c, 0x7);
  1214. odm_write_4byte(p_dm_odm, 0x1bcc, 0x0);
  1215. odm_write_4byte(p_dm_odm, 0x1b38, 0x20000000);
  1216. }
  1217. break;
  1218. }
  1219. p_dm_odm->n_iqk_cnt++;
  1220. if (cnt_iqk_fail == 0)
  1221. p_dm_odm->n_iqk_ok_cnt++;
  1222. else
  1223. p_dm_odm->n_iqk_fail_cnt = p_dm_odm->n_iqk_fail_cnt + cnt_iqk_fail;
  1224. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1225. ("All/Ok/Fail = %d %d %d\n", p_dm_odm->n_iqk_cnt, p_dm_odm->n_iqk_ok_cnt, p_dm_odm->n_iqk_fail_cnt));
  1226. if ((segment_iqk == true) && (p_iqk_info->kcount == kcount_limit))
  1227. break;
  1228. }
  1229. }
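/*
 * iqk_step sequencing above: step 1 runs LOK over the eight PAD stages,
 * step 2 runs S0 TXIQK (up to three extra retries), step 3 drives the
 * rxiqk_step machine until it reaches 5, and iqk_step == 4 prints the
 * LOK/IQK summary, re-initializes 0x1b2c/0x1bcc/0x1b38 for each path and
 * leaves the loop.  With segment_iqk == true the loop instead returns
 * early once kcount hits kcount_limit (kcount_limit_80m for 80 MHz,
 * kcount_limit_others otherwise); the outer while (1) loop in
 * _phy_iq_calibrate_8821c keeps calling back in until iqk_step == 4.
 */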
  1230. void
  1231. _iqk_start_iqk_8821c(
  1232. struct PHY_DM_STRUCT *p_dm_odm,
  1233. boolean segment_iqk
  1234. )
  1235. {
  1236. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  1237. u32 tmp;
  1238. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008);
  1239. odm_write_4byte(p_dm_odm, 0x1bb8, 0x00000000);
  1240. /*GNT_WL = 1*/
  1241. if (p_iqk_info->is_BTG) {
  1242. tmp = odm_get_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x1, RFREGOFFSETMASK);
  1243. tmp = (tmp & (~BIT(3))) | BIT(0) | BIT(2);
  1244. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x1, RFREGOFFSETMASK, tmp);
  1245. }
  1246. _iqk_iqk_by_path_8821c(p_dm_odm, segment_iqk);
  1247. }
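/*
 * The GNT_WL handling above is a plain RF read-modify-write: on a BTG
 * path, RF 0x1 is read, BIT(3) is cleared and BIT(0) | BIT(2) are set so
 * that (per the GNT_WL = 1 comment) WLAN keeps the grant while the IQK
 * one-shots run.  A hypothetical helper expressing the same pattern, not
 * part of this driver and shown only as a sketch:
 *
 *   static void _rf_rmw(struct PHY_DM_STRUCT *p_dm, u8 path, u8 reg,
 *                       u32 clr_mask, u32 set_mask)
 *   {
 *       u32 val = odm_get_rf_reg(p_dm, path, reg, RFREGOFFSETMASK);
 *       odm_set_rf_reg(p_dm, path, reg, RFREGOFFSETMASK,
 *                      (val & ~clr_mask) | set_mask);
 *   }
 *
 *   _rf_rmw(p_dm_odm, ODM_RF_PATH_A, 0x1, BIT(3), BIT(0) | BIT(2));
 */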
  1248. void
  1249. _iq_calibrate_8821c_init(
  1250. void *p_dm_void
  1251. )
  1252. {
  1253. struct PHY_DM_STRUCT *p_dm_odm = (struct PHY_DM_STRUCT *)p_dm_void;
  1254. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  1255. u8 i, j, k, m;
  1256. if (p_iqk_info->iqk_times == 0x0) {
  1257. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]=====>PHY_IQCalibrate_8821C_Init\n"));
  1258. for (i = 0; i < SS_8821C; i++) {
  1259. for (j = 0; j < 2; j++) {
  1260. p_iqk_info->LOK_fail[i] = true;
  1261. p_iqk_info->IQK_fail[j][i] = true;
  1262. p_iqk_info->iqc_matrix[j][i] = 0x20000000;
  1263. }
  1264. }
  1265. for (i = 0; i < 2; i++) {
  1266. p_iqk_info->iqk_channel[i] = 0x0;
  1267. for (j = 0; j < SS_8821C; j++) {
  1268. p_iqk_info->LOK_IDAC[i][j] = 0x0;
  1269. p_iqk_info->RXIQK_AGC[i][j] = 0x0;
  1270. p_iqk_info->bypass_iqk[i][j] = 0x0;
  1271. for (k = 0; k < 2; k++) {
  1272. p_iqk_info->IQK_fail_report[i][j][k] = true;
  1273. for (m = 0; m < 8; m++) {
  1274. p_iqk_info->IQK_CFIR_real[i][j][k][m] = 0x0;
  1275. p_iqk_info->IQK_CFIR_imag[i][j][k][m] = 0x0;
  1276. }
  1277. }
  1278. for (k = 0; k < 3; k++)
  1279. p_iqk_info->retry_count[i][j][k] = 0x0;
  1280. }
  1281. }
  1282. }
  1283. }
  1284. /*
  1285. void
  1286. _DPK_BackupReg_8821C(
  1287. struct PHY_DM_STRUCT* p_dm_odm,
1288. u32* DPK_backup,
  1289. u32* backup_dpk_reg
  1290. )
  1291. {
  1292. u32 i;
  1293. for (i = 0; i < DPK_BACKUP_REG_NUM_8821C; i++)
  1294. DPK_backup[i] = odm_read_4byte(p_dm_odm, backup_dpk_reg[i]);
  1295. }
  1296. void
  1297. _DPK_Restore_8821C(
  1298. struct PHY_DM_STRUCT* p_dm_odm,
1299. u32* DPK_backup,
  1300. u32* backup_dpk_reg
  1301. )
  1302. {
  1303. u32 i;
  1304. for (i = 0; i < DPK_BACKUP_REG_NUM_8821C; i++)
  1305. odm_write_4byte(p_dm_odm, backup_dpk_reg[i], DPK_backup[i]);
  1306. }
  1307. */
  1308. void
  1309. _dpk_dpk_setting_8821c(
  1310. struct PHY_DM_STRUCT *p_dm_odm,
  1311. u8 path
  1312. )
  1313. {
  1314. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
1315. ("[DPK]==========Start the DPD setting initialization\n"));
  1316. /*AFE setting*/
  1317. odm_write_4byte(p_dm_odm, 0xc60, 0x50000000);
  1318. odm_write_4byte(p_dm_odm, 0xc60, 0x700F0040);
  1319. odm_write_4byte(p_dm_odm, 0xc5c, 0xd1000120);
  1320. odm_write_4byte(p_dm_odm, 0xc58, 0xd8000402);
  1321. odm_write_4byte(p_dm_odm, 0xc6c, 0x00000a15);
  1322. odm_write_4byte(p_dm_odm, 0xc00, 0x00000000);
  1323. /*_iqk_bb_reset_8821c(p_dm_odm);*/
  1324. odm_write_4byte(p_dm_odm, 0xe5c, 0xD1000120);
  1325. odm_write_4byte(p_dm_odm, 0xc6c, 0x00000A15);
  1326. odm_write_4byte(p_dm_odm, 0xe6c, 0x00000A15);
  1327. odm_write_4byte(p_dm_odm, 0x808, 0x2D028200);
  1328. odm_write_4byte(p_dm_odm, 0x810, 0x20101063);
  1329. odm_write_4byte(p_dm_odm, 0x90c, 0x0B00C000);
  1330. odm_write_4byte(p_dm_odm, 0x9a4, 0x00000080);
  1331. odm_write_4byte(p_dm_odm, 0xc94, 0x01000101);
  1332. odm_write_4byte(p_dm_odm, 0xe94, 0x01000101);
  1333. odm_write_4byte(p_dm_odm, 0xe5c, 0xD1000120);
  1334. odm_write_4byte(p_dm_odm, 0xc6c, 0x00000A15);
  1335. odm_write_4byte(p_dm_odm, 0xe6c, 0x00000A15);
  1336. odm_write_4byte(p_dm_odm, 0x808, 0x2D028200);
  1337. odm_write_4byte(p_dm_odm, 0x810, 0x20101063);
  1338. odm_write_4byte(p_dm_odm, 0x90c, 0x0B00C000);
  1339. odm_write_4byte(p_dm_odm, 0x9a4, 0x00000080);
  1340. odm_write_4byte(p_dm_odm, 0xc94, 0x01000101);
  1341. odm_write_4byte(p_dm_odm, 0xe94, 0x01000101);
  1342. odm_write_4byte(p_dm_odm, 0x1904, 0x00020000);
  1343. /*path A*/
  1344. odm_set_bb_reg(p_dm_odm, 0x1d00, MASKDWORD, 0x30303030); /* cck */
  1345. odm_set_bb_reg(p_dm_odm, 0x1d04, MASKDWORD, 0x30303030); /* ofdm 6M/9M/12M/18M */
  1346. odm_set_bb_reg(p_dm_odm, 0x1d08, MASKDWORD, 0x30303030); /* ofdm 24M/36M/48M/54M */
  1347. odm_set_bb_reg(p_dm_odm, 0x1d0c, MASKDWORD, 0x30303030); /* mcs0~3 */
  1348. odm_set_bb_reg(p_dm_odm, 0x1d10, MASKDWORD, 0x30303030); /* mcs4~7 */
  1349. odm_set_bb_reg(p_dm_odm, 0x1d2c, MASKDWORD, 0x30303030); /* vht_1ss_mcs0~3 */
  1350. odm_set_bb_reg(p_dm_odm, 0x1d30, MASKDWORD, 0x30303030); /* vht_1ss_mcs4~7 */
  1351. odm_set_bb_reg(p_dm_odm, 0x1d34, 0x0000FFFF, 0x3030); /* vht_1ss_mcs8/9 */
  1352. /*RF*/
  1353. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xEF, RFREGOFFSETMASK, 0x80000);
  1354. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x33, RFREGOFFSETMASK, 0x00024);
  1355. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x3E, RFREGOFFSETMASK, 0x0003F);
  1356. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x3F, RFREGOFFSETMASK, 0xCBFCE);
  1357. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xEF, RFREGOFFSETMASK, 0x00000);
  1358. /*AGC boundary selection*/
  1359. odm_write_4byte(p_dm_odm, 0x1bbc, 0x0001abf6);
  1360. odm_write_4byte(p_dm_odm, 0x1b90, 0x0001e018);
  1361. odm_write_4byte(p_dm_odm, 0x1bb8, 0x000fffff);
  1362. odm_write_4byte(p_dm_odm, 0x1bc8, 0x000c55aa);
  1363. /*odm_write_4byte(p_dm_odm, 0x1bcc, 0x11978200);*/
  1364. odm_write_4byte(p_dm_odm, 0x1bcc, 0x11978800);
  1365. odm_write_4byte(p_dm_odm, 0xcb0, 0x77775747);
  1366. odm_write_4byte(p_dm_odm, 0xcb4, 0x100000f7);
  1367. odm_write_4byte(p_dm_odm, 0xcbc, 0x0);
  1368. }
  1369. void
  1370. _dpk_dynamic_bias_8821c(
  1371. struct PHY_DM_STRUCT *p_dm_odm,
  1372. u8 path,
  1373. u8 dynamicbias
  1374. )
  1375. {
  1376. u32 tmp;
  1377. tmp = odm_get_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xdf, RFREGOFFSETMASK);
  1378. tmp = tmp | BIT(8);
  1379. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xdf, RFREGOFFSETMASK, tmp);
  1380. if ((*p_dm_odm->p_band_type == ODM_BAND_5G) && (*p_dm_odm->p_band_width == 1))
  1381. odm_set_rf_reg(p_dm_odm, path, 0x61, BIT(7) | BIT(6) | BIT(5) | BIT(4), dynamicbias);
  1382. if ((*p_dm_odm->p_band_type == ODM_BAND_5G) && (*p_dm_odm->p_band_width == 2))
  1383. odm_set_rf_reg(p_dm_odm, path, 0x61, BIT(7) | BIT(6) | BIT(5) | BIT(4), dynamicbias);
  1384. tmp = tmp & (~BIT(8));
  1385. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xdf, RFREGOFFSETMASK, tmp);
  1386. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1387. ("[DPK]Set DynamicBias 0xdf=0x%x, 0x61=0x%x\n", odm_get_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0xdf, RFREGOFFSETMASK), odm_get_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x61, RFREGOFFSETMASK)));
  1388. }
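/*
 * Note: RF 0xdf BIT(8) is set before and cleared after the write to
 * RF 0x61[7:4], so it appears to act as a write-enable gate for the
 * dynamic-bias field; the bias itself is only programmed for 5G at
 * 40/80 MHz (p_band_width 1 or 2).  The trailing trace reads both
 * registers back so the applied value shows up in the log.
 */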
  1389. void
  1390. _dpk_dpk_boundary_selection_8821c(
  1391. struct PHY_DM_STRUCT *p_dm_odm,
  1392. u8 path
  1393. )
  1394. {
  1395. u8 tmp_pad, compared_pad, reg_tmp, compared_txbb;
  1396. u8 tmp_txbb = 0;
  1397. u32 rf_backup_reg00;
  1398. u8 i = 0;
  1399. u8 j = 1;
  1400. u32 boundaryselect = 0;
  1401. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  1402. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1403. ("[DPK]Start the DPD boundary selection\n"));
  1404. rf_backup_reg00 = odm_get_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x00, RFREGOFFSETMASK);
  1405. tmp_pad = 0;
  1406. compared_pad = 0;
  1407. boundaryselect = 0;
  1408. #if dpk_forcein_sram4
  1409. for (i = 0x1f; i > 0x0; i--) { /*i=tx index*/
  1410. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x00, RFREGOFFSETMASK, 0x20000 + i);
  1411. if (p_iqk_info->is_BTG) {
  1412. compared_pad = (u8)((0x1c000 & odm_get_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x78, RFREGOFFSETMASK)) >> 14);
  1413. compared_txbb = (u8)((0x07C00 & odm_get_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x5c, RFREGOFFSETMASK)) >> 10);
  1414. } else {
  1415. compared_pad = (u8)((0xe0 & odm_get_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x56, RFREGOFFSETMASK)) >> 5);
  1416. compared_txbb = (u8)((0x1f & odm_get_rf_reg(p_dm_odm, (enum odm_rf_radio_path_e)path, 0x56, RFREGOFFSETMASK)));
  1417. }
  1418. if (i == 0x1f) {
  1419. /*boundaryselect = compared_txbb;*/
  1420. boundaryselect = 0x1f;
  1421. tmp_pad = compared_pad;
  1422. }
  1423. if (compared_pad < tmp_pad) {
  1424. boundaryselect = boundaryselect + (i << (j * 5));
  1425. tmp_pad = compared_pad ;
  1426. j++;
  1427. }
  1428. if (j >= 4)
  1429. break;
  1430. }
  1431. #else
  1432. boundaryselect = 0x0;
  1433. #endif
  1434. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x00, RFREGOFFSETMASK, rf_backup_reg00);
  1435. odm_write_4byte(p_dm_odm, 0x1bbc, boundaryselect);
  1436. }
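/*
 * Boundary packing above (dpk_forcein_sram4 builds only): the TX index i
 * sweeps down from 0x1f, and each time the PAD code read back from the
 * RF (0x78[16:14] on a BTG path, 0x56[7:5] otherwise) drops below the
 * previous value, that index is packed into the next 5-bit field of
 * boundaryselect (bits [9:5], then [14:10], then [19:15]); the result is
 * written to 0x1bbc.  Worked example with hypothetical readings: if the
 * PAD code first drops at i = 0x18 and again at i = 0x10,
 *
 *   boundaryselect = 0x1f | (0x18 << 5) | (0x10 << 10) = 0x431f
 *
 * Without dpk_forcein_sram4 the register is simply cleared to 0.
 */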
  1437. u8
  1438. _dpk_get_dpk_tx_agc_8821c(
  1439. struct PHY_DM_STRUCT *p_dm_odm,
  1440. u8 path
  1441. )
  1442. {
  1443. u8 tx_agc_init_value = 0x1f; /* DPK TXAGC value*/
  1444. u32 rf_reg00 = 0x0;
  1445. u8 gainloss = 0x1;
  1446. u8 best_tx_agc ;
  1447. u8 tmp;
  1448. u8 i = 0;
  1449. boolean notready = true;
  1450. u8 delay_count = 0x0;
  1451. u32 temp2;
  1452. /* rf_reg00 = 0x40000 + tx_agc_init_value; set TXAGC value */
  1453. if (*p_dm_odm->p_band_type == ODM_BAND_5G) {
  1454. tx_agc_init_value = 0x1d;
  1455. rf_reg00 = 0x40000 + tx_agc_init_value; /* set TXAGC value*/
  1456. odm_write_4byte(p_dm_odm, 0x1bc8, 0x000c55aa);
  1457. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x8F, RFREGOFFSETMASK, 0xA9C00);
  1458. } else {
  1459. tx_agc_init_value = 0x17;
  1460. rf_reg00 = 0x44000 + tx_agc_init_value; /* set TXAGC value*/
  1461. odm_write_4byte(p_dm_odm, 0x1bc8, 0x000c44aa);
  1462. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x8F, RFREGOFFSETMASK, 0xAEC00);
  1463. }
  1464. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x00, RFREGOFFSETMASK, rf_reg00);
  1465. odm_set_bb_reg(p_dm_odm, 0x1b8c, BIT(15) | BIT(14) | BIT(13), gainloss);
  1466. odm_set_bb_reg(p_dm_odm, 0x1bc8, BIT(31), 0x1);
  1467. odm_set_bb_reg(p_dm_odm, 0x8f8, BIT(25) | BIT(24) | BIT(23) | BIT(22), 0x5);
  1468. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000d18);
  1469. /*ODM_delay_ms(1);*/
  1470. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000d19);
  1471. ODM_delay_ms(2);
  1472. #if 0
  1473. ODM_delay_ms(100);
  1474. #else
  1475. while (notready) {
  1476. if (odm_read_4byte(p_dm_odm, 0xfa0) & BIT(27))/*if (odm_read_4byte(p_dm_odm, 0x1b00) == (IQK_CMD & 0xffffff0f))*/
  1477. notready = false;
  1478. else
  1479. notready = true;
  1480. if (notready) {
  1481. ODM_delay_ms(1);
  1482. delay_count++;
  1483. }
  1484. if (delay_count >= 50) {
  1485. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1486. ("[DPK]S%d DPK_GetDPKTXAGC_8821C timeout!!!\n", path));
  1487. break;
  1488. }
  1489. }
  1490. odm_write_4byte(p_dm_odm, 0x1b90, 0x0001e018);
  1491. #endif
  1492. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1493. ("rf_reg00 =0x%x, 0x8F =0x%x\n", odm_get_rf_reg(p_dm_odm, path, 0x00, RFREGOFFSETMASK), odm_get_rf_reg(p_dm_odm, path, 0x8f, RFREGOFFSETMASK)));
  1494. odm_write_4byte(p_dm_odm, 0x1bd4, 0x60001);
  1495. tmp = (u8)odm_read_4byte(p_dm_odm, 0x1bfc);
  1496. best_tx_agc = tx_agc_init_value - (0xa - tmp);
  1497. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1498. ("[DPK](2), 0x1b8c =0x%x, delay_count=%d, rf_reg00 = 0x%x, 0x1b00 = 0x%x, 0x1bfc = 0x%x, 0x1bd4 = 0x%x,best_tx_agc =0x%x, tmp =0x%x, delay =%d ms\n",
  1499. odm_read_4byte(p_dm_odm, 0x1b8c), delay_count, rf_reg00, odm_read_4byte(p_dm_odm, 0x1b00), odm_read_4byte(p_dm_odm, 0x1bfc), odm_read_4byte(p_dm_odm, 0x1bd4), best_tx_agc, tmp, i * 2));
  1500. /* dbg message*/
  1501. #if 0
  1502. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
1503. ("[DPK] 3 0x1bcc = 0x%x, 0x1bb8 =0x%x\n", odm_read_4byte(p_dm_odm, 0x1bcc), odm_read_4byte(p_dm_odm, 0x1bb8)));
  1504. odm_write_4byte(p_dm_odm, 0x1bcc, 0x118f8800);
  1505. for (i = 0 ; i < 8; i++) {
  1506. odm_write_4byte(p_dm_odm, 0x1b90, 0x0101e018 + i);
  1507. odm_write_4byte(p_dm_odm, 0x1bd4, 0x00060000);
  1508. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1509. ("0x%x\n",
  1510. odm_read_4byte(p_dm_odm, 0x1bfc)));
  1511. odm_write_4byte(p_dm_odm, 0x1bd4, 0x00070000);
  1512. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1513. ("0x%x\n",
  1514. odm_read_4byte(p_dm_odm, 0x1bfc)));
  1515. odm_write_4byte(p_dm_odm, 0x1bd4, 0x00080000);
  1516. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1517. ("0x%x\n",
  1518. odm_read_4byte(p_dm_odm, 0x1bfc)));
  1519. odm_write_4byte(p_dm_odm, 0x1bd4, 0x00090000);
  1520. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1521. ("0x%x\n",
  1522. odm_read_4byte(p_dm_odm, 0x1bfc)));
  1523. }
  1524. odm_write_4byte(p_dm_odm, 0x1b90, 0x0001e018);
  1525. #endif
  1526. return best_tx_agc;
  1527. }
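/*
 * TXAGC search summary: the function starts from tx_agc_init_value
 * (0x1d in 5G, 0x17 in 2.4G), fires the gain-loss one-shot
 * (0x1b00 = 0xf8000d18 then 0xf8000d19), polls 0xfa0 BIT(27) for
 * completion, selects the gain-loss report with 0x1bd4 = 0x60001 and
 * reads it from 0x1bfc, then derives
 *
 *   best_tx_agc = tx_agc_init_value - (0xa - tmp)
 *
 * where 0xa appears to be the target gain-loss code.  Hypothetical
 * example: in 2.4G a reported tmp of 0x8 gives
 * best_tx_agc = 0x17 - (0xa - 0x8) = 0x15.
 */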
  1528. boolean
  1529. _dpk_enable_dpk_8821c(
  1530. struct PHY_DM_STRUCT *p_dm_odm,
  1531. u8 path,
  1532. u8 best_tx_agc
  1533. )
  1534. {
  1535. u32 rf_reg00 = 0x0;
  1536. u32 tmp;
  1537. boolean fail = true;
  1538. u8 i = 0;
  1539. boolean notready = true;
  1540. u8 delay_count = 0x0;
  1541. if (*p_dm_odm->p_band_type == ODM_BAND_5G) {
  1542. rf_reg00 = 0x40000 + best_tx_agc; /* set TXAGC value*/
  1543. } else {
  1544. rf_reg00 = 0x44000 + best_tx_agc; /* set TXAGC value*/
  1545. }
  1546. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x00, RFREGOFFSETMASK, rf_reg00);
  1547. ODM_delay_ms(1);
  1548. odm_set_bb_reg(p_dm_odm, 0x1bc8, BIT(31), 0x1);
  1549. odm_write_4byte(p_dm_odm, 0x8f8, 0x41400080);
  1550. ODM_delay_ms(1);
  1551. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000e18);
  1552. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000e19);
  1553. #if 0
  1554. ODM_delay_ms(10);
  1555. #else
  1556. ODM_delay_ms(5);
  1557. while (notready) {
  1558. if (odm_read_4byte(p_dm_odm, 0xfa0) & BIT(27))/*if (odm_read_4byte(p_dm_odm, 0x1b00) == (IQK_CMD & 0xffffff0f))*/
  1559. notready = false;
  1560. else
  1561. notready = true;
  1562. if (notready) {
  1563. ODM_delay_ms(1);
  1564. delay_count++;
  1565. }
  1566. if (delay_count >= 50) {
  1567. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
1568. ("[DPK]S%d _dpk_enable_dpk_8821c timeout!!!\n", path));
  1569. break;
  1570. }
  1571. }
  1572. #endif
  1573. odm_write_4byte(p_dm_odm, 0x1b90, 0x0001e018);
  1574. odm_write_4byte(p_dm_odm, 0x1bd4, 0xA0001);
  1575. tmp = odm_read_4byte(p_dm_odm, 0x1bfc);
  1576. if ((odm_read_4byte(p_dm_odm, 0x1b08) & 0x0f000000) == 0x0)
  1577. fail = false;
  1578. else
  1579. fail = true;
  1580. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1581. ("[DPK] (3), delay_count= %d, 0x1b0b = 0x%x, 0x1bc8 = 0x%x, rf_reg00 = 0x%x, ,0x1bfc = 0x%x, 0x1b90=0x%x, 0x1b94=0x%x\n",
  1582. delay_count, odm_read_1byte(p_dm_odm, 0x1b0b), odm_read_4byte(p_dm_odm, 0x1bc8), rf_reg00, tmp, odm_read_4byte(p_dm_odm, 0x1b90), odm_read_4byte(p_dm_odm, 0x1b94)));
  1583. /* dbg message*/
  1584. #if 0
  1585. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008);
  1586. odm_write_4byte(p_dm_odm, 0x1b08, 0x00000080);
  1587. odm_write_4byte(p_dm_odm, 0x1bd4, 0x00040001);
  1588. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1589. ("[DPK] SRAM value!!!\n"));
  1590. for (i = 0 ; i < 64; i++) {
  1591. /*odm_write_4byte(p_dm_odm, 0x1b90, 0x0101e018+i);*/
  1592. odm_write_4byte(p_dm_odm, 0x1bdc, 0xc0000081 + i * 2);
  1593. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1594. ("0x%x\n", odm_read_4byte(p_dm_odm, 0x1bfc)));
  1595. }
  1596. odm_write_4byte(p_dm_odm, 0x1bd4, 0x00050001);
  1597. for (i = 0 ; i < 64; i++) {
  1598. /*odm_write_4byte(p_dm_odm, 0x1b90, 0x0101e018+i);*/
  1599. odm_write_4byte(p_dm_odm, 0x1bdc, 0xc0000081 + i * 2);
  1600. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1601. ("0x%x\n", odm_read_4byte(p_dm_odm, 0x1bfc)));
  1602. }
  1603. /*odm_write_4byte(p_dm_odm, 0x1b08, 0x00000080);*/
  1604. odm_write_4byte(p_dm_odm, 0x1bd4, 0x00000001);
  1605. odm_write_4byte(p_dm_odm, 0x1bdc, 0x00000000);
  1606. #endif
  1607. return fail;
  1608. }
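/*
 * Both the gain-loss and the DPK one-shots use the same completion poll:
 * test 0xfa0 BIT(27) once per millisecond and give up after 50 tries.
 * A hypothetical helper capturing that pattern, not part of this driver
 * and shown only as a sketch:
 *
 *   static boolean _nctl_wait_done(struct PHY_DM_STRUCT *p_dm, u8 max_ms)
 *   {
 *       u8 t;
 *       for (t = 0; t < max_ms; t++) {
 *           if (odm_read_4byte(p_dm, 0xfa0) & BIT(27))
 *               return true;
 *           ODM_delay_ms(1);
 *       }
 *       return false;
 *   }
 *
 * The pass/fail decision itself comes from 0x1b08[27:24]: the DPK result
 * is treated as OK only when that nibble reads back as 0.
 */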
  1609. boolean
  1610. _dpk_enable_dpd_8821c(
  1611. struct PHY_DM_STRUCT *p_dm_odm,
  1612. u8 path,
  1613. u8 best_tx_agc
  1614. )
  1615. {
  1616. boolean fail = true;
  1617. u8 tmp;
  1618. u8 offset = 0x0;
  1619. u8 i = 0;
  1620. boolean notready = true;
  1621. u8 delay_count = 0x0;
  1622. odm_set_bb_reg(p_dm_odm, 0x1bc8, BIT(31), 0x1);
  1623. odm_write_4byte(p_dm_odm, 0x8f8, 0x41400080);
  1624. ODM_delay_ms(1);
  1625. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000f18);
  1626. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000f19);
  1627. ODM_delay_ms(30);
  1628. #if 0
  1629. ODM_delay_ms(100);
  1630. #else
  1631. while (notready) {
  1632. if (odm_read_4byte(p_dm_odm, 0xfa0) & BIT(27))/*if (odm_read_4byte(p_dm_odm, 0x1b00) == (IQK_CMD & 0xffffff0f))*/
  1633. notready = false;
  1634. else
  1635. notready = true;
  1636. if (notready) {
  1637. ODM_delay_ms(1);
  1638. delay_count++;
  1639. }
  1640. if (delay_count >= 50) {
  1641. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
1642. ("[DPK]S%d _dpk_enable_dpd_8821c timeout!!!\n", path));
  1643. break;
  1644. }
  1645. }
  1646. odm_write_4byte(p_dm_odm, 0x1b90, 0x0001e018);
  1647. #endif
  1648. odm_write_4byte(p_dm_odm, 0x1b90, 0x0001e018);
  1649. odm_write_4byte(p_dm_odm, 0x1bd4, 0xA0001);
  1650. tmp = odm_read_1byte(p_dm_odm, 0x1bfc);
  1651. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1652. ("[DPK](4) init 0x1b08 =%x, 0x1bc8 = 0x%x,0x1bfc = 0x%x, ,0x1bd0 = 0x%x, offset =%x, 1bcc =%x\n",
  1653. odm_read_4byte(p_dm_odm, 0x1b08), odm_read_4byte(p_dm_odm, 0x1bc8), tmp, odm_read_4byte(p_dm_odm, 0x1bd0), offset, odm_read_4byte(p_dm_odm, 0x1bcc)));
  1654. /*if( (odm_read_4byte(p_dm_odm, 0x1b08) & 0x0f000000) == 0x0)*/
  1655. if (true) {
  1656. odm_write_4byte(p_dm_odm, 0x1b98, 0x48004800);
  1657. odm_write_4byte(p_dm_odm, 0x1bdc, 0x0);
  1658. if (best_tx_agc >= 0x19)
  1659. offset = best_tx_agc - 0x19;
  1660. else
  1661. offset = 0x20 - (0x19 - best_tx_agc);
  1662. odm_set_bb_reg(p_dm_odm, 0x1bd0, BIT(12) | BIT(11) | BIT(10) | BIT(9) | BIT(8), offset);
  1663. fail = false;
  1664. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1665. ("[DPK](4) OK 0x1b08 =%x, 0x1bc8 = 0x%x,0x1bfc = 0x%x, ,0x1bd0 = 0x%x, offset =%x, 1bcc =%x\n",
  1666. odm_read_4byte(p_dm_odm, 0x1b08), odm_read_4byte(p_dm_odm, 0x1bc8), tmp, odm_read_4byte(p_dm_odm, 0x1bd0), offset, odm_read_4byte(p_dm_odm, 0x1bcc)));
  1667. } else
  1668. fail = true;
  1669. return fail;
  1670. }
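/*
 * The DPD gain offset written to 0x1bd0[12:8] above is best_tx_agc taken
 * relative to 0x19 and wrapped into a 5-bit two's-complement field:
 *
 *   offset = best_tx_agc - 0x19            (best_tx_agc >= 0x19)
 *   offset = 0x20 - (0x19 - best_tx_agc)   (best_tx_agc <  0x19)
 *
 * Hypothetical examples: best_tx_agc = 0x1b gives offset 0x02, while
 * best_tx_agc = 0x15 gives offset 0x1c (i.e. -4).  The same field is
 * later re-adjusted by dpk_temperature_compensate_8821c() as the
 * thermal reading drifts.
 */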
  1671. void
  1672. phy_dpd_calibrate_8821c(
  1673. struct PHY_DM_STRUCT *p_dm_odm,
  1674. boolean reset
  1675. )
  1676. {
  1677. u32 backup_dpdbb[3];
1678. u32 backup_dpdbb_reg[3] = {0x1b2c, 0x1b38, 0x1b3c};
  1679. u8 best_tx_agc = 0x1c;
  1680. u32 MAC_backup[MAC_REG_NUM_8821C], RF_backup[RF_REG_NUM_8821C][1];
  1681. u32 backup_mac_reg[MAC_REG_NUM_8821C] = {0x520, 0x550, 0x1518};
  1682. u32 BB_backup[DPK_BB_REG_NUM_8821C];
  1683. u32 backup_bb_reg[DPK_BB_REG_NUM_8821C] = {0x808, 0x90c, 0xc00, 0xcb0, 0xcb4, 0xcbc, 0x1990, 0x9a4, 0xa04
  1684. , 0xc58, 0xc5c, 0xe58, 0xe5c, 0xc6c, 0xe6c, 0x810, 0x90c, 0xc94, 0xe94, 0x1904, 0xcb0, 0xcb4, 0xcbc, 0xc00
  1685. };
  1686. u32 backup_rf_reg[RF_REG_NUM_8821C] = {0xdf, 0xde, 0x8f, 0x65, 0x0, 0x1};
  1687. u8 i;
  1688. u32 backup_dpk_reg[3] = {0x1bd0, 0x1b98, 0x1bbc};
  1689. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  1690. p_iqk_info->is_BTG = (boolean) odm_get_bb_reg(p_dm_odm, 0xcb8, BIT(16));
  1691. if (!p_dm_odm->mp_mode)
  1692. if (_iqk_reload_iqk_8821c(p_dm_odm, reset))
  1693. return;
  1694. /*2G is not stable*/
  1695. /* if (!(*p_dm_odm->p_band_type == ODM_BAND_5G)) return; */
  1696. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
1697. ("[DPK]==========DPK start!!!!!==========\n"));
  1698. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1699. ("[DPK]p_band_type = %s, band_width = %d, ExtPA2G = %d, ext_pa_5g = %d\n", (*p_dm_odm->p_band_type == ODM_BAND_5G) ? "5G" : "2G", *p_dm_odm->p_band_width, p_dm_odm->ext_pa, p_dm_odm->ext_pa_5g));
  1700. #if 1
  1701. _iqk_backup_mac_bb_8821c(p_dm_odm, MAC_backup, BB_backup, backup_mac_reg, backup_bb_reg, DPK_BB_REG_NUM_8821C);
  1702. _iqk_afe_setting_8821c(p_dm_odm, true);
  1703. _iqk_backup_rf_8821c(p_dm_odm, RF_backup, backup_rf_reg);
  1704. #else
  1705. _iqk_rfe_setting_8821c(p_dm_odm, false);
  1706. _iqk_agc_bnd_int_8821c(p_dm_odm);
  1707. _iqk_rf_setting_8821c(p_dm_odm);
  1708. #endif
  1709. if (p_iqk_info->is_BTG) {
  1710. } else {
  1711. if (*p_dm_odm->p_band_type == ODM_BAND_2_4G)
  1712. odm_set_bb_reg(p_dm_odm, 0xcb8, BIT(8), 0x1);
  1713. else
  1714. odm_set_bb_reg(p_dm_odm, 0xcb8, BIT(8), 0x0);
  1715. }
1716. /*backup 0x1b2c, 0x1b38, 0x1b3c*/
1718. backup_dpdbb[0] = odm_read_4byte(p_dm_odm, 0x1b2c);
1721. backup_dpdbb[1] = odm_read_4byte(p_dm_odm, 0x1b38);
1724. backup_dpdbb[2] = odm_read_4byte(p_dm_odm, 0x1b3c);
  1726. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
1727. ("[DPK]In DPD Process(1), Backup\n"));
  1728. #if 1
1729. /*DPK init register setting*/
  1730. _dpk_dpk_setting_8821c(p_dm_odm, ODM_RF_PATH_A);
  1731. _dpk_dpk_boundary_selection_8821c(p_dm_odm, ODM_RF_PATH_A);
  1732. odm_set_bb_reg(p_dm_odm, 0x1bc8, BIT(31), 0x1);
  1733. odm_set_bb_reg(p_dm_odm, 0x8f8, BIT(25) | BIT(24) | BIT(23) | BIT(22), 0x5);
  1734. /* Get the best TXAGC*/
  1735. #endif
  1736. #if 1
  1737. best_tx_agc = _dpk_get_dpk_tx_agc_8821c(p_dm_odm, ODM_RF_PATH_A);
  1738. #endif
  1739. ODM_delay_ms(2);
  1740. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1741. ("[DPK]In DPD Process(2), Best TXAGC = 0x%x\n", best_tx_agc));
  1742. #if 1
  1743. if (_dpk_enable_dpk_8821c(p_dm_odm, ODM_RF_PATH_A, best_tx_agc)) {
  1744. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
1745. ("[DPK]In DPD Process(3), DPK process failed\n"));
  1746. }
  1747. #endif
  1748. #if 1
  1749. ODM_delay_ms(2);
  1750. if (_dpk_enable_dpd_8821c(p_dm_odm, ODM_RF_PATH_A, best_tx_agc)) {
  1751. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
1752. ("[DPK]In DPD Process(4), DPD process failed\n"));
  1753. }
  1754. #endif
  1755. /* restore IQK */
  1756. p_iqk_info->rf_reg18 = odm_get_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x18, RFREGOFFSETMASK);
  1757. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[DPK]reload IQK result before, p_iqk_info->rf_reg18=0x%x, p_iqk_info->iqk_channel[0]=0x%x, p_iqk_info->iqk_channel[1]=0x%x!!!!\n", p_iqk_info->rf_reg18, p_iqk_info->iqk_channel[0], p_iqk_info->iqk_channel[1]));
  1758. _iqk_reload_iqk_setting_8821c(p_dm_odm, 0, 2);
  1759. _iqk_fill_iqk_report_8821c(p_dm_odm, 0);
  1760. /* Restore setup */
  1761. odm_set_bb_reg(p_dm_odm, 0x8f8, BIT(25) | BIT(24) | BIT(23) | BIT(22), 0x5);
  1762. odm_set_bb_reg(p_dm_odm, 0x1bd4, BIT(20) | BIT(19) | BIT(18) | BIT(17) | BIT(16), 0x0);
  1763. odm_set_bb_reg(p_dm_odm, 0x1b00, BIT(2) | BIT(1), 0x0);
  1764. odm_set_bb_reg(p_dm_odm, 0x1b08, BIT(6) | BIT(5), 0x2);
  1765. odm_write_4byte(p_dm_odm, 0x1b2c, backup_dpdbb[0]);
  1766. odm_write_4byte(p_dm_odm, 0x1b38, backup_dpdbb[1]);
  1767. odm_write_4byte(p_dm_odm, 0x1b3c, backup_dpdbb[2]);
  1768. /*enable DPK*/
  1769. odm_set_bb_reg(p_dm_odm, 0x1b2c, BIT(7) | BIT(6) | BIT(5) | BIT(4) | BIT(3) | BIT(2) | BIT(1) | BIT(0), 0x5);
  1770. /*enable boundary condition*/
1771. #if dpk_forcein_sram4 /* disable : force in sram4 */
  1772. odm_set_bb_reg(p_dm_odm, 0x1bcc, BIT(27), 0x1);
  1773. #endif
  1774. odm_write_4byte(p_dm_odm, 0x1bcc, 0x11868800);
  1775. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1776. ("[DPK]In DPD Process(5), Restore\n"));
  1777. #if 1
  1778. _iqk_restore_mac_bb_8821c(p_dm_odm, MAC_backup, BB_backup, backup_mac_reg, backup_bb_reg, DPK_BB_REG_NUM_8821C);
  1779. _iqk_afe_setting_8821c(p_dm_odm, false);
  1780. _iqk_restore_rf_8821c(p_dm_odm, backup_rf_reg, RF_backup);
  1781. #else
  1782. _iqk_restore_rf_8821c(p_dm_odm, backup_rf_reg, RF_backup);
  1783. #endif
  1784. /* backup the DPK current result*/
  1785. for (i = 0; i < DPK_BACKUP_REG_NUM_8821C; i++)
  1786. dpk_result[i] = odm_read_4byte(p_dm_odm, backup_dpk_reg[i]);
  1787. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1788. ("[DPK]the DPD calibration Process Finish (6), dpk_result = 0x%x\n", dpk_result[0]));
  1789. return;
  1790. }
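/*
 * Hand-off note: dpk_result[] (a file-scope buffer) is filled at the end
 * of this function from backup_dpk_reg[] = {0x1bd0, 0x1b98, 0x1bbc};
 * dpk_temperature_compensate_8821c() below later re-reads the gain
 * offset from dpk_result[0] bits [12:8] and rewrites 0x1bd0 as the
 * thermal value moves away from eeprom_thermal_meter.
 */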
  1791. void
  1792. _phy_iq_calibrate_8821c(
  1793. struct PHY_DM_STRUCT *p_dm_odm,
  1794. boolean reset
  1795. )
  1796. {
  1797. u32 MAC_backup[MAC_REG_NUM_8821C], BB_backup[BB_REG_NUM_8821C], RF_backup[RF_REG_NUM_8821C][1];
  1798. u32 backup_mac_reg[MAC_REG_NUM_8821C] = {0x520, 0x550, 0x1518};
  1799. u32 backup_bb_reg[BB_REG_NUM_8821C] = {0x808, 0x90c, 0xc00, 0xcb0, 0xcb4, 0xcbc, 0x1990, 0x9a4, 0xa04, 0xb00};
  1800. u32 backup_rf_reg[RF_REG_NUM_8821C] = {0xdf, 0xde, 0x8f, 0x65, 0x0, 0x1};
  1801. u8 i, j;
  1802. boolean segment_iqk = false, is_mp = false;
  1803. struct _IQK_INFORMATION *p_iqk_info = &p_dm_odm->IQK_info;
  1804. if (p_dm_odm->mp_mode)
  1805. is_mp = true;
  1806. else if (p_dm_odm->is_linked)
  1807. segment_iqk = false;
  1808. p_iqk_info->is_BTG = (boolean)odm_get_bb_reg(p_dm_odm, 0xcb8, BIT(16));
  1809. if (!is_mp)
  1810. if (_iqk_reload_iqk_8821c(p_dm_odm, reset))
  1811. return;
  1812. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
1813. ("[IQK]==========IQK start!!!!!==========\n"));
  1814. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1815. ("[IQK]p_band_type = %s, band_width = %d, ExtPA2G = %d, ext_pa_5g = %d\n", (*p_dm_odm->p_band_type == ODM_BAND_5G) ? "5G" : "2G", *p_dm_odm->p_band_width, p_dm_odm->ext_pa, p_dm_odm->ext_pa_5g));
  1816. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1817. ("[IQK]Interface = %d, cut_version = %x\n", p_dm_odm->support_interface, p_dm_odm->cut_version));
  1818. p_iqk_info->tmp_GNTWL = _iqk_indirect_read_reg(p_dm_odm, 0x38);
  1819. p_iqk_info->iqk_times++;
  1820. p_iqk_info->kcount = 0;
  1821. p_dm_odm->rf_calibrate_info.iqk_total_progressing_time = 0;
  1822. p_dm_odm->rf_calibrate_info.iqk_step = 1;
  1823. p_iqk_info->rxiqk_step = 1;
  1824. _iqk_backup_iqk_8821c(p_dm_odm, 0);
1825. _iqk_backup_mac_bb_8821c(p_dm_odm, MAC_backup, BB_backup, backup_mac_reg, backup_bb_reg, BB_REG_NUM_8821C);
  1826. _iqk_backup_rf_8821c(p_dm_odm, RF_backup, backup_rf_reg);
  1827. #if 0
  1828. _iqk_configure_macbb_8821c(p_dm_odm);
  1829. _iqk_afe_setting_8821c(p_dm_odm, true);
  1830. _iqk_rfe_setting_8821c(p_dm_odm, false);
  1831. _iqk_agc_bnd_int_8821c(p_dm_odm);
  1832. _IQK_RFSetting_8821C(p_dm_odm);
  1833. #endif
  1834. while (1) {
  1835. if (!is_mp)
  1836. p_dm_odm->rf_calibrate_info.iqk_start_time = odm_get_current_time(p_dm_odm);
  1837. _iqk_configure_macbb_8821c(p_dm_odm);
  1838. _iqk_afe_setting_8821c(p_dm_odm, true);
  1839. _iqk_rfe_setting_8821c(p_dm_odm, false);
  1840. _iqk_agc_bnd_int_8821c(p_dm_odm);
  1841. _iqk_rfsetting_8821c(p_dm_odm);
  1842. _iqk_start_iqk_8821c(p_dm_odm, segment_iqk);
  1843. _iqk_afe_setting_8821c(p_dm_odm, false);
1844. _iqk_restore_mac_bb_8821c(p_dm_odm, MAC_backup, BB_backup, backup_mac_reg, backup_bb_reg, BB_REG_NUM_8821C);
  1845. _iqk_restore_rf_8821c(p_dm_odm, backup_rf_reg, RF_backup);
  1846. if (!is_mp) {
  1847. p_dm_odm->rf_calibrate_info.iqk_progressing_time = odm_get_progressing_time(p_dm_odm, p_dm_odm->rf_calibrate_info.iqk_start_time);
  1848. p_dm_odm->rf_calibrate_info.iqk_total_progressing_time += odm_get_progressing_time(p_dm_odm, p_dm_odm->rf_calibrate_info.iqk_start_time);
  1849. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  1850. ("[IQK]IQK progressing_time = %lld ms\n", p_dm_odm->rf_calibrate_info.iqk_progressing_time));
  1851. }
  1852. if (p_dm_odm->rf_calibrate_info.iqk_step == 4)
  1853. break;
  1854. p_iqk_info->kcount = 0;
  1855. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]delay 50ms!!!\n"));
  1856. ODM_delay_ms(50);
  1857. };
  1858. _iqk_backup_iqk_8821c(p_dm_odm, 1);
  1859. #if 0
  1860. _iqk_afe_setting_8821c(p_dm_odm, false);
  1861. _iqk_restore_mac_bb_8821c(p_dm_odm, MAC_backup, BB_backup, backup_mac_reg, backup_bb_reg);
  1862. _iqk_restore_rf_8821c(p_dm_odm, backup_rf_reg, RF_backup);
  1863. #endif
  1864. _iqk_fill_iqk_report_8821c(p_dm_odm, 0);
  1865. if (!is_mp)
  1866. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]Total IQK progressing_time = %lld ms\n",
  1867. p_dm_odm->rf_calibrate_info.iqk_total_progressing_time));
  1868. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1869. ("[IQK]==========IQK end!!!!!==========\n"));
  1870. RT_TRACE(COMP_COEX, DBG_LOUD, ("[IQK]check 0x49c = %x\n", odm_read_1byte(p_dm_odm, 0x49c)));
  1871. }
  1872. void
  1873. _phy_iq_calibrate_by_fw_8821c(
  1874. struct PHY_DM_STRUCT *p_dm_odm,
  1875. u8 clear
  1876. )
  1877. {
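/*
 * FW-offload IQK is currently a stub: the command bytes below are only
 * assembled, no H2C is issued here, and phy_iq_calibrate_8821c() forces
 * iqk_fw_offload to 0 before this path can be taken, so the driver
 * always falls back to the host-side _phy_iq_calibrate_8821c().
 */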
  1878. u8 iqk_cmd[3] = { *p_dm_odm->p_channel, 0x0, 0x0};
  1879. u8 buf1 = 0x0;
  1880. u8 buf2 = 0x0;
  1881. }
  1882. /*IQK version:0xe, NCTL:0x7*/
  1883. /*1. disable segment IQK*/
  1884. void
  1885. phy_iq_calibrate_8821c(
  1886. void *p_dm_void,
  1887. boolean clear
  1888. )
  1889. {
  1890. struct PHY_DM_STRUCT *p_dm_odm = (struct PHY_DM_STRUCT *)p_dm_void;
  1891. u32 counter = 0x0;
  1892. #if !(DM_ODM_SUPPORT_TYPE & ODM_AP)
  1893. struct _ADAPTER *p_adapter = p_dm_odm->adapter;
  1894. HAL_DATA_TYPE *p_hal_data = GET_HAL_DATA(p_adapter);
  1895. #if (MP_DRIVER == 1)
  1896. #if (DM_ODM_SUPPORT_TYPE == ODM_WIN)
  1897. PMPT_CONTEXT p_mpt_ctx = &(p_adapter->MptCtx);
  1898. #else
  1899. PMPT_CONTEXT p_mpt_ctx = &(p_adapter->mppriv.mpt_ctx);
  1900. #endif
  1901. #endif
  1902. #if (DM_ODM_SUPPORT_TYPE & (ODM_WIN))
  1903. if (odm_check_power_status(p_adapter) == false)
  1904. return;
  1905. #endif
  1906. #if MP_DRIVER == 1
  1907. #if (DM_ODM_SUPPORT_TYPE == ODM_WIN)
  1908. if (p_mpt_ctx->bSingleTone || p_mpt_ctx->bCarrierSuppression)
  1909. return;
  1910. #else
  1911. if (p_mpt_ctx->is_single_tone || p_mpt_ctx->is_carrier_suppression)
  1912. return;
  1913. #endif
  1914. #endif
  1915. #endif
  1916. if (!p_dm_odm->mp_mode)
  1917. _iqk_check_coex_status(p_dm_odm, true);
  1918. if (*(p_dm_odm->p_is_scan_in_process)) {
  1919. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]scan is in process, bypass IQK\n"));
  1920. return;
  1921. }
  1922. p_dm_odm->iqk_fw_offload = 0;
  1923. /*FW IQK*/
  1924. if (p_dm_odm->iqk_fw_offload) {
  1925. if (!p_dm_odm->rf_calibrate_info.is_iqk_in_progress) {
  1926. odm_acquire_spin_lock(p_dm_odm, RT_IQK_SPINLOCK);
  1927. p_dm_odm->rf_calibrate_info.is_iqk_in_progress = true;
  1928. odm_release_spin_lock(p_dm_odm, RT_IQK_SPINLOCK);
  1929. p_dm_odm->rf_calibrate_info.iqk_start_time = odm_get_current_time(p_dm_odm);
  1930. odm_write_4byte(p_dm_odm, 0x1b00, 0xf8000008);
  1931. odm_set_bb_reg(p_dm_odm, 0x1bf0, 0xff000000, 0xff);
  1932. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE,
  1933. ("[IQK]0x1bf0 = 0x%x\n", odm_read_4byte(p_dm_odm, 0x1bf0)));
  1934. _phy_iq_calibrate_by_fw_8821c(p_dm_odm, clear);
  1935. while (1) {
  1936. if (((odm_read_4byte(p_dm_odm, 0x1bf0) >> 24) == 0x7f) || (counter > 300))
  1937. break;
  1938. counter++;
  1939. ODM_delay_ms(1);
  1940. };
  1941. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_TRACE, ("[IQK]counter = %d\n", counter));
  1942. p_dm_odm->rf_calibrate_info.iqk_progressing_time = odm_get_progressing_time(p_dm_odm, p_dm_odm->rf_calibrate_info.iqk_start_time);
  1943. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]IQK progressing_time = %lld ms\n", p_dm_odm->rf_calibrate_info.iqk_progressing_time));
  1944. odm_acquire_spin_lock(p_dm_odm, RT_IQK_SPINLOCK);
  1945. p_dm_odm->rf_calibrate_info.is_iqk_in_progress = false;
  1946. odm_release_spin_lock(p_dm_odm, RT_IQK_SPINLOCK);
  1947. } else
1948. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]== Return the IQK CMD, because IQK is in progress ==\n"));
  1949. } else {
  1950. _iq_calibrate_8821c_init(p_dm_void);
  1951. if (!p_dm_odm->rf_calibrate_info.is_iqk_in_progress) {
  1952. odm_acquire_spin_lock(p_dm_odm, RT_IQK_SPINLOCK);
  1953. p_dm_odm->rf_calibrate_info.is_iqk_in_progress = true;
  1954. odm_release_spin_lock(p_dm_odm, RT_IQK_SPINLOCK);
  1955. if (p_dm_odm->mp_mode)
  1956. p_dm_odm->rf_calibrate_info.iqk_start_time = odm_get_current_time(p_dm_odm);
  1957. #if (DM_ODM_SUPPORT_TYPE & (ODM_CE))
  1958. _phy_iq_calibrate_8821c(p_dm_odm, clear);
  1959. /*DBG_871X("%s,%d, do IQK %u ms\n", __func__, __LINE__, rtw_get_passing_time_ms(time_iqk));*/
  1960. #else
  1961. _phy_iq_calibrate_8821c(p_dm_odm, clear);
  1962. #endif
  1963. if (p_dm_odm->mp_mode) {
  1964. p_dm_odm->rf_calibrate_info.iqk_progressing_time = odm_get_progressing_time(p_dm_odm, p_dm_odm->rf_calibrate_info.iqk_start_time);
  1965. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]IQK progressing_time = %lld ms\n", p_dm_odm->rf_calibrate_info.iqk_progressing_time));
  1966. }
  1967. odm_acquire_spin_lock(p_dm_odm, RT_IQK_SPINLOCK);
  1968. p_dm_odm->rf_calibrate_info.is_iqk_in_progress = false;
  1969. odm_release_spin_lock(p_dm_odm, RT_IQK_SPINLOCK);
  1970. } else
1971. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[IQK]== Return the IQK CMD, because IQK is in progress ==\n"));
  1972. }
  1973. #if (DM_ODM_SUPPORT_TYPE & ODM_AP)
  1974. _iqk_iqk_fail_report_8821c(p_dm_odm);
  1975. #endif
  1976. if (!p_dm_odm->mp_mode)
  1977. _iqk_check_coex_status(p_dm_odm, false);
  1978. RT_TRACE(COMP_COEX, DBG_LOUD, ("[IQK]final 0x49c = %x\n", odm_read_1byte(p_dm_odm, 0x49c)));
  1979. }
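/*
 * Concurrency note: phy_iq_calibrate_8821c() above and
 * phy_dp_calibrate_8821c() below guard the long-running calibration with
 * the same pattern - RT_IQK_SPINLOCK is held only while flipping
 * is_iqk_in_progress, the calibration itself runs outside the lock, and
 * a caller that finds the flag already set just logs the fact and
 * returns without calibrating.
 */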
  1980. void
  1981. phy_dp_calibrate_8821c(
  1982. void *p_dm_void,
  1983. boolean clear
  1984. )
  1985. {
  1986. struct PHY_DM_STRUCT *p_dm_odm = (struct PHY_DM_STRUCT *)p_dm_void;
  1987. u32 counter = 0x0;
  1988. #if !(DM_ODM_SUPPORT_TYPE & ODM_AP)
  1989. struct _ADAPTER *p_adapter = p_dm_odm->adapter;
  1990. HAL_DATA_TYPE *p_hal_data = GET_HAL_DATA(p_adapter);
  1991. #if (MP_DRIVER == 1)
  1992. #if (DM_ODM_SUPPORT_TYPE == ODM_WIN)
  1993. PMPT_CONTEXT p_mpt_ctx = &(p_adapter->MptCtx);
  1994. #else
  1995. PMPT_CONTEXT p_mpt_ctx = &(p_adapter->mppriv.mpt_ctx);
  1996. #endif
  1997. #endif
  1998. #if (DM_ODM_SUPPORT_TYPE & (ODM_WIN))
  1999. if (odm_check_power_status(p_adapter) == false)
  2000. return;
  2001. #endif
  2002. #if MP_DRIVER == 1
  2003. #if (DM_ODM_SUPPORT_TYPE == ODM_WIN)
  2004. if (p_mpt_ctx->bSingleTone || p_mpt_ctx->bCarrierSuppression)
  2005. return;
  2006. #else
  2007. if (p_mpt_ctx->is_single_tone || p_mpt_ctx->is_carrier_suppression)
  2008. return;
  2009. #endif
  2010. #endif
  2011. #endif
  2012. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[DPK] In PHY, p_dm_odm->dpk_en == %x\n", p_dm_odm->dpk_en));
2013. /*if DPK is not enabled*/
  2014. if (p_dm_odm->dpk_en == 0x0)
  2015. return;
  2016. /*start*/
  2017. if (!p_dm_odm->rf_calibrate_info.is_iqk_in_progress) {
  2018. odm_acquire_spin_lock(p_dm_odm, RT_IQK_SPINLOCK);
  2019. p_dm_odm->rf_calibrate_info.is_iqk_in_progress = true;
  2020. odm_release_spin_lock(p_dm_odm, RT_IQK_SPINLOCK);
  2021. if (p_dm_odm->mp_mode)
  2022. p_dm_odm->rf_calibrate_info.iqk_start_time = odm_get_current_time(p_dm_odm);
  2023. /*do DPK*/
  2024. phy_dpd_calibrate_8821c(p_dm_odm, clear);
  2025. if (p_dm_odm->mp_mode) {
  2026. p_dm_odm->rf_calibrate_info.iqk_progressing_time = odm_get_progressing_time(p_dm_odm, p_dm_odm->rf_calibrate_info.iqk_start_time);
  2027. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[DPK]DPK progressing_time = %lld ms\n", p_dm_odm->rf_calibrate_info.iqk_progressing_time));
  2028. }
  2029. odm_acquire_spin_lock(p_dm_odm, RT_IQK_SPINLOCK);
  2030. p_dm_odm->rf_calibrate_info.is_iqk_in_progress = false;
  2031. odm_release_spin_lock(p_dm_odm, RT_IQK_SPINLOCK);
  2032. } else
2033. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[DPK]== Return the DPK CMD, because DPK is in progress ==\n"));
  2034. }
  2035. void dpk_temperature_compensate_8821c(
  2036. void *p_dm_void
  2037. )
  2038. {
  2039. struct PHY_DM_STRUCT *p_dm_odm = (struct PHY_DM_STRUCT *) p_dm_void;
  2040. struct _ADAPTER *adapter = p_dm_odm->adapter;
  2041. HAL_DATA_TYPE *p_hal_data = GET_HAL_DATA(adapter);
  2042. static u8 dpk_tm_trigger = 0;
  2043. u8 thermal_value = 0, delta_dpk, p = 0, i = 0;
  2044. u8 thermal_value_avg_count = 0;
  2045. u8 thermal_value_avg_times = 2;
  2046. u32 thermal_value_avg = 0;
  2047. u8 tmp, abs_temperature;
2048. /*if DPK is not enabled*/
  2049. if (p_dm_odm->dpk_en == 0x0)
  2050. return;
  2051. if (!dpk_tm_trigger) {
  2052. odm_set_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x42, BIT(17) | BIT(16), 0x03);
  2053. /*ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[DPK] (1) Trigger Thermal Meter!!\n"));*/
  2054. dpk_tm_trigger = 1;
  2055. return;
  2056. } else {
  2057. /* Initialize */
  2058. dpk_tm_trigger = 0;
  2059. /*ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD, ("[DPK] (2) calculate the thermal !!\n"));
  2060. */
  2061. /* calculate average thermal meter */
2062. thermal_value = (u8)odm_get_rf_reg(p_dm_odm, ODM_RF_PATH_A, 0x42, 0xfc00); /* thermal meter: RF reg 0x42[15:10] */
  2063. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  2064. ("[DPK] (3) current Thermal Meter = %d\n", thermal_value));
  2065. p_dm_odm->rf_calibrate_info.thermal_value_dpk = thermal_value;
  2066. p_dm_odm->rf_calibrate_info.thermal_value_avg[p_dm_odm->rf_calibrate_info.thermal_value_avg_index] = thermal_value;
  2067. p_dm_odm->rf_calibrate_info.thermal_value_avg_index++;
  2068. if (p_dm_odm->rf_calibrate_info.thermal_value_avg_index == thermal_value_avg_times)
  2069. p_dm_odm->rf_calibrate_info.thermal_value_avg_index = 0;
  2070. for (i = 0; i < thermal_value_avg_times; i++) {
  2071. if (p_dm_odm->rf_calibrate_info.thermal_value_avg[i]) {
  2072. thermal_value_avg += p_dm_odm->rf_calibrate_info.thermal_value_avg[i];
  2073. thermal_value_avg_count++;
  2074. }
  2075. }
  2076. if (thermal_value_avg_count) /*Calculate Average thermal_value after average enough times*/
  2077. thermal_value = (u8)(thermal_value_avg / thermal_value_avg_count);
  2078. /* compensate the DPK */
  2079. delta_dpk = (thermal_value > p_hal_data->eeprom_thermal_meter) ? (thermal_value - p_hal_data->eeprom_thermal_meter) : (p_hal_data->eeprom_thermal_meter - thermal_value);
  2080. tmp = (u8)((dpk_result[0] & 0x00001f00) >> 8);
  2081. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  2082. ("[DPK] (5)delta_dpk = %d, eeprom_thermal_meter = %d, tmp=%d\n", delta_dpk, p_hal_data->eeprom_thermal_meter, tmp));
  2083. if (thermal_value > p_hal_data->eeprom_thermal_meter) {
  2084. abs_temperature = thermal_value - p_hal_data->eeprom_thermal_meter;
  2085. if (abs_temperature >= 20)
  2086. tmp = tmp + 4;
  2087. else if (abs_temperature >= 15)
  2088. tmp = tmp + 3;
  2089. else if (abs_temperature >= 10)
  2090. tmp = tmp + 2;
  2091. else if (abs_temperature >= 5)
  2092. tmp = tmp + 1;
  2093. } else { /*low temperature*/
  2094. abs_temperature = p_hal_data->eeprom_thermal_meter - thermal_value;
  2095. if (abs_temperature >= 20)
  2096. tmp = tmp - 4;
  2097. else if (abs_temperature >= 15)
  2098. tmp = tmp - 3;
  2099. else if (abs_temperature >= 10)
  2100. tmp = tmp - 2;
  2101. else if (abs_temperature >= 5)
  2102. tmp = tmp - 1;
  2103. }
  2104. odm_set_bb_reg(p_dm_odm, 0x1bd0, BIT(12) | BIT(11) | BIT(10) | BIT(9) | BIT(8), tmp);
  2105. ODM_RT_TRACE(p_dm_odm, ODM_COMP_CALIBRATION, ODM_DBG_LOUD,
  2106. ("[DPK] (6)delta_dpk = %d, eeprom_thermal_meter = %d, new tmp=%d, 0x1bd0=0x%x\n", delta_dpk, p_hal_data->eeprom_thermal_meter, tmp, odm_read_4byte(p_dm_odm, 0x1bd0)));
  2107. }
  2108. }
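/*
 * Thermal compensation summary: the routine alternates between two
 * invocations - the first triggers the RF thermal meter
 * (RF 0x42[17:16] = 0x3) and returns, the second reads RF 0x42[15:10],
 * folds it into a 2-sample running average and compares the result with
 * eeprom_thermal_meter.  The DPD gain offset in 0x1bd0[12:8] (seeded
 * from dpk_result[0]) is then stepped by 1/2/3/4 codes for a thermal
 * difference of at least 5/10/15/20, up for a hotter reading and down
 * for a colder one.  Hypothetical example: eeprom_thermal_meter = 0x1c
 * and an averaged reading of 0x28 differ by 12, so the offset is raised
 * by 2.
 */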
  2109. #endif