Lines Matching refs:hba

42 struct ufs_hba *hba = dev_get_drvdata(dev); in ufs_sysfs_pm_lvl_store() local
51 spin_lock_irqsave(hba->host->host_lock, flags); in ufs_sysfs_pm_lvl_store()
53 hba->rpm_lvl = value; in ufs_sysfs_pm_lvl_store()
55 hba->spm_lvl = value; in ufs_sysfs_pm_lvl_store()
56 spin_unlock_irqrestore(hba->host->host_lock, flags); in ufs_sysfs_pm_lvl_store()
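The fragments above (lines 42-56) come from the shared store helper behind the rpm_lvl and spm_lvl attributes: it writes the new power-management level into hba under the SCSI host lock. A minimal reconstruction, in which the kstrtoul() parsing and the UFS_PM_LVL_MAX bound check are assumptions not visible in this listing:

/*
 * Sketch of the shared store helper.  The parsing and range check are
 * assumed; only the locked update of rpm_lvl/spm_lvl appears above.
 */
static ssize_t ufs_sysfs_pm_lvl_store(struct device *dev,
                                      struct device_attribute *attr,
                                      const char *buf, size_t count,
                                      bool rpm)
{
        struct ufs_hba *hba = dev_get_drvdata(dev);
        unsigned long flags, value;

        if (kstrtoul(buf, 0, &value))
                return -EINVAL;

        if (value >= UFS_PM_LVL_MAX)            /* assumed bound check */
                return -EINVAL;

        spin_lock_irqsave(hba->host->host_lock, flags);
        if (rpm)
                hba->rpm_lvl = value;           /* runtime PM level */
        else
                hba->spm_lvl = value;           /* system PM level */
        spin_unlock_irqrestore(hba->host->host_lock, flags);

        return count;
}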
63 struct ufs_hba *hba = dev_get_drvdata(dev); in rpm_lvl_show() local
65 return sysfs_emit(buf, "%d\n", hba->rpm_lvl); in rpm_lvl_show()
77 struct ufs_hba *hba = dev_get_drvdata(dev); in rpm_target_dev_state_show() local
80 ufs_pm_lvl_states[hba->rpm_lvl].dev_state)); in rpm_target_dev_state_show()
86 struct ufs_hba *hba = dev_get_drvdata(dev); in rpm_target_link_state_show() local
89 ufs_pm_lvl_states[hba->rpm_lvl].link_state)); in rpm_target_link_state_show()
95 struct ufs_hba *hba = dev_get_drvdata(dev); in spm_lvl_show() local
97 return sysfs_emit(buf, "%d\n", hba->spm_lvl); in spm_lvl_show()
109 struct ufs_hba *hba = dev_get_drvdata(dev); in spm_target_dev_state_show() local
112 ufs_pm_lvl_states[hba->spm_lvl].dev_state)); in spm_target_dev_state_show()
118 struct ufs_hba *hba = dev_get_drvdata(dev); in spm_target_link_state_show() local
121 ufs_pm_lvl_states[hba->spm_lvl].link_state)); in spm_target_link_state_show()
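Lines 63-121 above are the corresponding show handlers; they only format state already cached in hba, and the unbalanced parentheses hint at string-conversion wrappers that the reference match has elided. A simplified sketch that emits the raw enum value instead of a name, purely for illustration:

/* Simplified: the in-tree handler converts dev_state to a string. */
static ssize_t rpm_target_dev_state_show(struct device *dev,
                                         struct device_attribute *attr,
                                         char *buf)
{
        struct ufs_hba *hba = dev_get_drvdata(dev);

        return sysfs_emit(buf, "%d\n",
                          ufs_pm_lvl_states[hba->rpm_lvl].dev_state);
}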
152 struct ufs_hba *hba = dev_get_drvdata(dev); in auto_hibern8_show() local
154 if (!ufshcd_is_auto_hibern8_supported(hba)) in auto_hibern8_show()
157 pm_runtime_get_sync(hba->dev); in auto_hibern8_show()
158 ufshcd_hold(hba, false); in auto_hibern8_show()
159 ahit = ufshcd_readl(hba, REG_AUTO_HIBERNATE_IDLE_TIMER); in auto_hibern8_show()
160 ufshcd_release(hba); in auto_hibern8_show()
161 pm_runtime_put_sync(hba->dev); in auto_hibern8_show()
170 struct ufs_hba *hba = dev_get_drvdata(dev); in auto_hibern8_store() local
173 if (!ufshcd_is_auto_hibern8_supported(hba)) in auto_hibern8_store()
182 ufshcd_auto_hibern8_update(hba, ufshcd_us_to_ahit(timer)); in auto_hibern8_store()
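Lines 152-182 cover the auto-hibernate idle timer. The show path takes a runtime-PM reference and a clock-gating hold (ufshcd_hold()) so REG_AUTO_HIBERNATE_IDLE_TIMER can be read while the controller is active; the store path converts a microsecond value with ufshcd_us_to_ahit() and applies it via ufshcd_auto_hibern8_update(). A sketch of the show side, assuming ufshcd_ahit_to_us() as the inverse conversion for the output:

static ssize_t auto_hibern8_show(struct device *dev,
                                 struct device_attribute *attr, char *buf)
{
        u32 ahit;
        struct ufs_hba *hba = dev_get_drvdata(dev);

        if (!ufshcd_is_auto_hibern8_supported(hba))
                return -EOPNOTSUPP;

        pm_runtime_get_sync(hba->dev);  /* keep the controller powered */
        ufshcd_hold(hba, false);        /* keep its clocks ungated */
        ahit = ufshcd_readl(hba, REG_AUTO_HIBERNATE_IDLE_TIMER);
        ufshcd_release(hba);
        pm_runtime_put_sync(hba->dev);

        return sysfs_emit(buf, "%d\n", ufshcd_ahit_to_us(ahit));
}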
213 struct ufs_hba *hba = dev_get_drvdata(dev); in monitor_enable_show() local
215 return sysfs_emit(buf, "%d\n", hba->monitor.enabled); in monitor_enable_show()
222 struct ufs_hba *hba = dev_get_drvdata(dev); in monitor_enable_store() local
229 spin_lock_irqsave(hba->host->host_lock, flags); in monitor_enable_store()
230 if (value == hba->monitor.enabled) in monitor_enable_store()
234 memset(&hba->monitor, 0, sizeof(hba->monitor)); in monitor_enable_store()
236 hba->monitor.enabled = true; in monitor_enable_store()
237 hba->monitor.enabled_ts = ktime_get(); in monitor_enable_store()
241 spin_unlock_irqrestore(hba->host->host_lock, flags); in monitor_enable_store()
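Lines 222-241 are the monitor enable toggle: enabling stamps enabled_ts, disabling wipes the accumulated statistics, and either transition happens under the host lock so it cannot race with completion-path updates. A reconstruction in which the boolean parsing is an assumption:

static ssize_t monitor_enable_store(struct device *dev,
                                    struct device_attribute *attr,
                                    const char *buf, size_t count)
{
        struct ufs_hba *hba = dev_get_drvdata(dev);
        unsigned long value, flags;

        if (kstrtoul(buf, 0, &value))           /* assumed parsing */
                return -EINVAL;

        value = !!value;
        spin_lock_irqsave(hba->host->host_lock, flags);
        if (value == hba->monitor.enabled)
                goto out_unlock;

        if (!value) {
                memset(&hba->monitor, 0, sizeof(hba->monitor));
        } else {
                hba->monitor.enabled = true;
                hba->monitor.enabled_ts = ktime_get();
        }

out_unlock:
        spin_unlock_irqrestore(hba->host->host_lock, flags);
        return count;
}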
248 struct ufs_hba *hba = dev_get_drvdata(dev); in monitor_chunk_size_show() local
250 return sysfs_emit(buf, "%lu\n", hba->monitor.chunk_size); in monitor_chunk_size_show()
257 struct ufs_hba *hba = dev_get_drvdata(dev); in monitor_chunk_size_store() local
263 spin_lock_irqsave(hba->host->host_lock, flags); in monitor_chunk_size_store()
265 if (!hba->monitor.enabled) in monitor_chunk_size_store()
266 hba->monitor.chunk_size = value; in monitor_chunk_size_store()
267 spin_unlock_irqrestore(hba->host->host_lock, flags); in monitor_chunk_size_store()
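Lines 257-267 show that the chunk size may only be changed while the monitor is disabled, again under the host lock. A sketch along the same lines:

static ssize_t monitor_chunk_size_store(struct device *dev,
                                        struct device_attribute *attr,
                                        const char *buf, size_t count)
{
        struct ufs_hba *hba = dev_get_drvdata(dev);
        unsigned long value, flags;

        if (kstrtoul(buf, 0, &value))           /* assumed parsing */
                return -EINVAL;

        spin_lock_irqsave(hba->host->host_lock, flags);
        /* Only allow the change while the monitor is not collecting. */
        if (!hba->monitor.enabled)
                hba->monitor.chunk_size = value;
        spin_unlock_irqrestore(hba->host->host_lock, flags);
        return count;
}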
274 struct ufs_hba *hba = dev_get_drvdata(dev); in read_total_sectors_show() local
276 return sysfs_emit(buf, "%lu\n", hba->monitor.nr_sec_rw[READ]); in read_total_sectors_show()
282 struct ufs_hba *hba = dev_get_drvdata(dev); in read_total_busy_show() local
285 ktime_to_us(hba->monitor.total_busy[READ])); in read_total_busy_show()
291 struct ufs_hba *hba = dev_get_drvdata(dev); in read_nr_requests_show() local
293 return sysfs_emit(buf, "%lu\n", hba->monitor.nr_req[READ]); in read_nr_requests_show()
300 struct ufs_hba *hba = dev_get_drvdata(dev); in read_req_latency_avg_show() local
301 struct ufs_hba_monitor *m = &hba->monitor; in read_req_latency_avg_show()
311 struct ufs_hba *hba = dev_get_drvdata(dev); in read_req_latency_max_show() local
314 ktime_to_us(hba->monitor.lat_max[READ])); in read_req_latency_max_show()
321 struct ufs_hba *hba = dev_get_drvdata(dev); in read_req_latency_min_show() local
324 ktime_to_us(hba->monitor.lat_min[READ])); in read_req_latency_min_show()
331 struct ufs_hba *hba = dev_get_drvdata(dev); in read_req_latency_sum_show() local
334 ktime_to_us(hba->monitor.lat_sum[READ])); in read_req_latency_sum_show()
341 struct ufs_hba *hba = dev_get_drvdata(dev); in write_total_sectors_show() local
343 return sysfs_emit(buf, "%lu\n", hba->monitor.nr_sec_rw[WRITE]); in write_total_sectors_show()
349 struct ufs_hba *hba = dev_get_drvdata(dev); in write_total_busy_show() local
352 ktime_to_us(hba->monitor.total_busy[WRITE])); in write_total_busy_show()
358 struct ufs_hba *hba = dev_get_drvdata(dev); in write_nr_requests_show() local
360 return sysfs_emit(buf, "%lu\n", hba->monitor.nr_req[WRITE]); in write_nr_requests_show()
367 struct ufs_hba *hba = dev_get_drvdata(dev); in write_req_latency_avg_show() local
368 struct ufs_hba_monitor *m = &hba->monitor; in write_req_latency_avg_show()
378 struct ufs_hba *hba = dev_get_drvdata(dev); in write_req_latency_max_show() local
381 ktime_to_us(hba->monitor.lat_max[WRITE])); in write_req_latency_max_show()
388 struct ufs_hba *hba = dev_get_drvdata(dev); in write_req_latency_min_show() local
391 ktime_to_us(hba->monitor.lat_min[WRITE])); in write_req_latency_min_show()
398 struct ufs_hba *hba = dev_get_drvdata(dev); in write_req_latency_sum_show() local
401 ktime_to_us(hba->monitor.lat_sum[WRITE])); in write_req_latency_sum_show()
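Lines 274-401 are the per-direction statistics; most handlers emit a single cached counter from struct ufs_hba_monitor. The *_req_latency_avg handlers are the only derived values, dividing the accumulated latency by the request count. A sketch for the read side, with a zero-request guard added here as a defensive assumption:

static ssize_t read_req_latency_avg_show(struct device *dev,
                                         struct device_attribute *attr,
                                         char *buf)
{
        struct ufs_hba *hba = dev_get_drvdata(dev);
        struct ufs_hba_monitor *m = &hba->monitor;

        if (!m->nr_req[READ])   /* defensive: no requests seen yet */
                return sysfs_emit(buf, "0\n");

        return sysfs_emit(buf, "%llu\n",
                          div_u64(ktime_to_us(m->lat_sum[READ]),
                                  m->nr_req[READ]));
}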
446 static ssize_t ufs_sysfs_read_desc_param(struct ufs_hba *hba, in ufs_sysfs_read_desc_param() argument
459 pm_runtime_get_sync(hba->dev); in ufs_sysfs_read_desc_param()
460 ret = ufshcd_read_desc_param(hba, desc_id, desc_index, in ufs_sysfs_read_desc_param()
462 pm_runtime_put_sync(hba->dev); in ufs_sysfs_read_desc_param()
490 struct ufs_hba *hba = dev_get_drvdata(dev); \
491 return ufs_sysfs_read_desc_param(hba, QUERY_DESC_IDN_##_duname, \
715 struct ufs_hba *hba = dev_get_drvdata(dev); \
716 return ufs_sysfs_read_desc_param(hba, QUERY_DESC_IDN_POWER, 0, \
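Lines 446-462 define ufs_sysfs_read_desc_param(), the helper behind every descriptor attribute; lines 490-491 and 715-716 are the generator macros that pass it the descriptor IDN and parameter offset. It pins the device with a runtime-PM reference, reads one parameter through ufshcd_read_desc_param(), and formats the result. The sketch below emits only a single byte; the in-tree helper handles several parameter widths:

static ssize_t ufs_sysfs_read_desc_param(struct ufs_hba *hba,
                                         enum desc_idn desc_id,
                                         u8 desc_index, u8 param_offset,
                                         u8 *sysfs_buf, u8 param_size)
{
        u8 desc_buf[8] = {0};
        int ret;

        pm_runtime_get_sync(hba->dev);
        ret = ufshcd_read_desc_param(hba, desc_id, desc_index,
                                     param_offset, desc_buf, param_size);
        pm_runtime_put_sync(hba->dev);
        if (ret)
                return -EINVAL;

        /* Simplified single-byte formatting. */
        return sysfs_emit(sysfs_buf, "0x%02X\n", desc_buf[0]);
}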
832 struct ufs_hba *hba = dev_get_drvdata(dev); \
840 pm_runtime_get_sync(hba->dev); \
841 ret = ufshcd_query_descriptor_retry(hba, \
851 ret = ufshcd_read_string_desc(hba, index, &desc_buf, \
857 pm_runtime_put_sync(hba->dev); \
896 struct ufs_hba *hba = dev_get_drvdata(dev); \
898 index = ufshcd_wb_get_query_index(hba); \
899 pm_runtime_get_sync(hba->dev); \
900 ret = ufshcd_query_flag(hba, UPIU_QUERY_OPCODE_READ_FLAG, \
902 pm_runtime_put_sync(hba->dev); \
953 struct ufs_hba *hba = dev_get_drvdata(dev); \
958 index = ufshcd_wb_get_query_index(hba); \
959 pm_runtime_get_sync(hba->dev); \
960 ret = ufshcd_query_attr(hba, UPIU_QUERY_OPCODE_READ_ATTR, \
962 pm_runtime_put_sync(hba->dev); \
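Lines 896-902 and 953-962 come from the generator macros for device flags and attributes. Expanded for one flag, the pattern is a runtime-PM-guarded ufshcd_query_flag(); the attribute variant is the same shape around ufshcd_query_attr(). The expansion below is illustrative rather than the literal macro text, and the true/false output format is an assumption:

static ssize_t wb_enable_show(struct device *dev,
                              struct device_attribute *attr, char *buf)
{
        struct ufs_hba *hba = dev_get_drvdata(dev);
        bool flag;
        u8 index;
        int ret;

        index = ufshcd_wb_get_query_index(hba);
        pm_runtime_get_sync(hba->dev);
        ret = ufshcd_query_flag(hba, UPIU_QUERY_OPCODE_READ_FLAG,
                                QUERY_FLAG_IDN_WB_EN, index, &flag);
        pm_runtime_put_sync(hba->dev);
        if (ret)
                return -EINVAL;

        return sysfs_emit(buf, "%s\n", flag ? "true" : "false");
}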
1041 struct ufs_hba *hba = shost_priv(sdev->host); \
1043 if (!ufs_is_valid_unit_desc_lun(&hba->dev_info, lun, \
1046 return ufs_sysfs_read_desc_param(hba, QUERY_DESC_IDN_##_duname, \
1104 struct ufs_hba *hba = shost_priv(sdev->host); in dyn_cap_needed_attribute_show() local
1108 pm_runtime_get_sync(hba->dev); in dyn_cap_needed_attribute_show()
1109 ret = ufshcd_query_attr(hba, UPIU_QUERY_OPCODE_READ_ATTR, in dyn_cap_needed_attribute_show()
1111 pm_runtime_put_sync(hba->dev); in dyn_cap_needed_attribute_show()
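Lines 1041-1046 are the per-LUN descriptor attributes, which validate the LUN with ufs_is_valid_unit_desc_lun() before delegating to ufs_sysfs_read_desc_param(). Lines 1104-1111 instead query the dDynCapNeeded attribute for the LUN behind the SCSI device; a reconstruction, with the hex output format being an assumption:

static ssize_t dyn_cap_needed_attribute_show(struct device *dev,
                                             struct device_attribute *attr,
                                             char *buf)
{
        struct scsi_device *sdev = to_scsi_device(dev);
        struct ufs_hba *hba = shost_priv(sdev->host);
        u8 lun = ufshcd_scsi_to_upiu_lun(sdev->lun);
        u32 value;
        int ret;

        pm_runtime_get_sync(hba->dev);
        ret = ufshcd_query_attr(hba, UPIU_QUERY_OPCODE_READ_ATTR,
                                QUERY_ATTR_IDN_DYN_CAP_NEEDED, lun, 0,
                                &value);
        pm_runtime_put_sync(hba->dev);
        if (ret)
                return -EINVAL;

        return sysfs_emit(buf, "0x%08X\n", value);
}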
1127 void ufs_sysfs_add_nodes(struct ufs_hba *hba) in ufs_sysfs_add_nodes() argument
1131 ret = sysfs_create_groups(&hba->dev->kobj, ufs_sysfs_groups); in ufs_sysfs_add_nodes()
1133 dev_err(hba->dev, in ufs_sysfs_add_nodes()
1139 trace_android_vh_ufs_update_sysfs(hba); in ufs_sysfs_add_nodes()
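Finally, lines 1127-1139 register all of the attribute groups at probe time and invoke the Android vendor hook afterwards. A reconstruction in which the exact error-message wording is an assumption:

void ufs_sysfs_add_nodes(struct ufs_hba *hba)
{
        int ret;

        ret = sysfs_create_groups(&hba->dev->kobj, ufs_sysfs_groups);
        if (ret)
                dev_err(hba->dev,
                        "%s: sysfs groups creation failed (err = %d)\n",
                        __func__, ret);

        /* Android vendor hook so vendor modules can adjust the nodes. */
        trace_android_vh_ufs_update_sysfs(hba);
}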