Lines matching refs:le32_to_cpu
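
le32_to_cpu() converts a 32-bit little-endian value, as the firmware image stores it in its debug TLV structures, to host CPU byte order. Every match listed below reads a __le32 field out of a TLV in the iwlwifi debug TLV parsing code (the iwl_dbg_tlv_* functions) before using it as a length, type, ID, or time point. A minimal sketch of the pattern follows, using a hypothetical example_tlv struct and example_tlv_fits() helper rather than the driver's real definitions:

	#include <linux/types.h>
	#include <asm/byteorder.h>

	/* Hypothetical TLV layout for illustration only; the driver's real
	 * structures are defined in its own headers and differ from this sketch.
	 */
	struct example_tlv {
		__le32 type;	/* little-endian as laid out in the firmware file */
		__le32 length;	/* payload length in bytes, little-endian */
		u8 data[];
	};

	/* Check that a TLV's payload fits in the remaining buffer space.
	 * The __le32 field must be converted with le32_to_cpu() before any
	 * arithmetic or comparison; skipping the conversion only appears to
	 * work on little-endian hosts and breaks on big-endian ones.
	 */
	static bool example_tlv_fits(const struct example_tlv *tlv, size_t avail)
	{
		u32 len = le32_to_cpu(tlv->length);

		return sizeof(*tlv) + len <= avail;
	}
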
70 u32 len = le32_to_cpu(tlv->length); in iwl_dbg_tlv_add()
86 u32 type = le32_to_cpu(tlv->type); in iwl_dbg_tlv_ver_support()
88 u32 ver = le32_to_cpu(hdr->version); in iwl_dbg_tlv_ver_support()
102 if (le32_to_cpu(tlv->length) != sizeof(*debug_info)) in iwl_dbg_tlv_alloc_debug_info()
118 if (le32_to_cpu(tlv->length) != sizeof(*alloc)) in iwl_dbg_tlv_alloc_buf_alloc()
121 buf_location = le32_to_cpu(alloc->buf_location); in iwl_dbg_tlv_alloc_buf_alloc()
122 alloc_id = le32_to_cpu(alloc->alloc_id); in iwl_dbg_tlv_alloc_buf_alloc()
154 u32 tp = le32_to_cpu(hcmd->time_point); in iwl_dbg_tlv_alloc_hcmd()
156 if (le32_to_cpu(tlv->length) <= sizeof(*hcmd)) in iwl_dbg_tlv_alloc_hcmd()
179 u32 id = le32_to_cpu(reg->id); in iwl_dbg_tlv_alloc_region()
180 u32 type = le32_to_cpu(reg->type); in iwl_dbg_tlv_alloc_region()
181 u32 tlv_len = sizeof(*tlv) + le32_to_cpu(tlv->length); in iwl_dbg_tlv_alloc_region()
187 if (le32_to_cpu(reg->hdr.version) == 2) in iwl_dbg_tlv_alloc_region()
190 if (le32_to_cpu(tlv->length) < sizeof(*reg)) in iwl_dbg_tlv_alloc_region()
235 u32 tp = le32_to_cpu(trig->time_point); in iwl_dbg_tlv_alloc_trigger()
239 if (le32_to_cpu(tlv->length) < sizeof(*trig)) in iwl_dbg_tlv_alloc_trigger()
250 if (!le32_to_cpu(trig->occurrences)) { in iwl_dbg_tlv_alloc_trigger()
251 dup = kmemdup(tlv, sizeof(*tlv) + le32_to_cpu(tlv->length), in iwl_dbg_tlv_alloc_trigger()
270 u32 tp = le32_to_cpu(conf_set->time_point); in iwl_dbg_tlv_config_set()
271 u32 type = le32_to_cpu(conf_set->set_type); in iwl_dbg_tlv_config_set()
304 u32 type = le32_to_cpu(tlv->type); in iwl_dbg_tlv_alloc()
306 u32 domain = le32_to_cpu(hdr->domain); in iwl_dbg_tlv_alloc()
326 le32_to_cpu(hdr->version)); in iwl_dbg_tlv_alloc()
453 tlv_len = le32_to_cpu(tlv->length); in iwl_dbg_tlv_parse_bin()
567 num_frags = le32_to_cpu(fw_mon_cfg->max_frags_num); in iwl_dbg_tlv_alloc_fragments()
575 remain_pages = DIV_ROUND_UP(le32_to_cpu(fw_mon_cfg->req_size), in iwl_dbg_tlv_alloc_fragments()
595 u32 alloc_size = le32_to_cpu(fw_mon_cfg->req_size) - in iwl_dbg_tlv_alloc_fragments()
598 if (alloc_size < le32_to_cpu(fw_mon_cfg->min_size)) { in iwl_dbg_tlv_alloc_fragments()
628 if (le32_to_cpu(fwrt->trans->dbg.fw_mon_cfg[alloc_id].buf_location) != in iwl_dbg_tlv_apply_buffer()
710 if (le32_to_cpu(fwrt->trans->dbg.fw_mon_cfg[alloc_id].buf_location) != in iwl_dbg_tlv_update_dram()
790 u16 hcmd_len = le32_to_cpu(node->tlv.length) - sizeof(*hcmd); in iwl_dbg_tlv_send_hcmds()
809 u32 len = (le32_to_cpu(node->tlv.length) - sizeof(*config_list)) / 8; in iwl_dbg_tlv_apply_config()
810 u32 type = le32_to_cpu(config_list->set_type); in iwl_dbg_tlv_apply_config()
811 u32 offset = le32_to_cpu(config_list->addr_offset); in iwl_dbg_tlv_apply_config()
822 address = le32_to_cpu(config_list->addr_val[count].address); in iwl_dbg_tlv_apply_config()
823 value = le32_to_cpu(config_list->addr_val[count].value); in iwl_dbg_tlv_apply_config()
831 address = le32_to_cpu(config_list->addr_val[count].address); in iwl_dbg_tlv_apply_config()
832 value = le32_to_cpu(config_list->addr_val[count].value); in iwl_dbg_tlv_apply_config()
841 address = le32_to_cpu(config_list->addr_val[count].address); in iwl_dbg_tlv_apply_config()
842 value = le32_to_cpu(config_list->addr_val[count].value); in iwl_dbg_tlv_apply_config()
860 le32_to_cpu(config_list->addr_offset)); in iwl_dbg_tlv_apply_config()
862 address = le32_to_cpu(config_list->addr_val[count].address); in iwl_dbg_tlv_apply_config()
867 dram_info.dbgc1_size = cpu_to_le32(le32_to_cpu(dram_size) - 0x400); in iwl_dbg_tlv_apply_config()
879 le32_to_cpu(config_list->addr_val[0].value); in iwl_dbg_tlv_apply_config()
903 u32 occur = le32_to_cpu(dump_data.trig->occurrences); in iwl_dbg_tlv_periodic_trig_handler()
904 u32 collect_interval = le32_to_cpu(dump_data.trig->data[0]); in iwl_dbg_tlv_periodic_trig_handler()
922 u32 occur = le32_to_cpu(trig->occurrences), collect_interval; in iwl_dbg_tlv_set_periodic_trigs()
931 if (le32_to_cpu(node->tlv.length) < in iwl_dbg_tlv_set_periodic_trigs()
938 if (le32_to_cpu(trig->data[0]) < min_interval) { in iwl_dbg_tlv_set_periodic_trigs()
941 le32_to_cpu(trig->data[0]), min_interval); in iwl_dbg_tlv_set_periodic_trigs()
945 collect_interval = le32_to_cpu(trig->data[0]); in iwl_dbg_tlv_set_periodic_trigs()
1002 u32 policy = le32_to_cpu(trig->apply_policy); in iwl_dbg_tlv_override_trig_node()
1003 u32 size = le32_to_cpu(trig_tlv->length); in iwl_dbg_tlv_override_trig_node()
1008 u32 data_len = le32_to_cpu(node_tlv->length) - in iwl_dbg_tlv_override_trig_node()
1013 le32_to_cpu(trig->time_point)); in iwl_dbg_tlv_override_trig_node()
1020 le32_to_cpu(trig->time_point)); in iwl_dbg_tlv_override_trig_node()
1023 if (size != le32_to_cpu(node_tlv->length)) { in iwl_dbg_tlv_override_trig_node()
1033 le32_to_cpu(trig->time_point)); in iwl_dbg_tlv_override_trig_node()
1051 le32_to_cpu(trig->time_point)); in iwl_dbg_tlv_override_trig_node()
1060 le32_to_cpu(trig->time_point)); in iwl_dbg_tlv_override_trig_node()
1066 le32_to_cpu(trig->time_point)); in iwl_dbg_tlv_override_trig_node()
1081 u32 policy = le32_to_cpu(trig->apply_policy); in iwl_dbg_tlv_add_active_trigger()
1096 le32_to_cpu(trig->time_point)); in iwl_dbg_tlv_add_active_trigger()
1172 le32_to_cpu(dump_data.trig->data[i]))) { in iwl_dbg_tlv_tp_trigger()
1209 u32 dest = le32_to_cpu(fw_mon_cfg->buf_location); in iwl_dbg_tlv_init_cfg()
1247 reg_type = le32_to_cpu(reg->type); in iwl_dbg_tlv_init_cfg()
1250 !(BIT(le32_to_cpu(reg->dram_alloc_id)) & failed_alloc)) in iwl_dbg_tlv_init_cfg()
1255 le32_to_cpu(reg->dram_alloc_id), i); in iwl_dbg_tlv_init_cfg()
1257 failed_alloc &= ~le32_to_cpu(reg->dram_alloc_id); in iwl_dbg_tlv_init_cfg()