Lines matching refs:tf (uses of struct ata_taskfile)

574 static int ata_rwcmd_protocol(struct ata_taskfile *tf, struct ata_device *dev)  in ata_rwcmd_protocol()  argument
580 fua = (tf->flags & ATA_TFLAG_FUA) ? 4 : 0; in ata_rwcmd_protocol()
581 lba48 = (tf->flags & ATA_TFLAG_LBA48) ? 2 : 0; in ata_rwcmd_protocol()
582 write = (tf->flags & ATA_TFLAG_WRITE) ? 1 : 0; in ata_rwcmd_protocol()
585 tf->protocol = ATA_PROT_PIO; in ata_rwcmd_protocol()
589 tf->protocol = ATA_PROT_PIO; in ata_rwcmd_protocol()
592 tf->protocol = ATA_PROT_DMA; in ata_rwcmd_protocol()
598 tf->command = cmd; in ata_rwcmd_protocol()
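
The 4/2/1 values assigned on lines 580-582 are bit weights: fua + lba48 + write selects one of eight read/write command variants, and the PIO/DMA protocol chosen on lines 585-592 decides which command row that index is applied to. Below is a minimal standalone sketch of the indexing scheme, using a hypothetical two-row table with illustrative opcodes rather than the kernel's own command table.

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative ATA opcodes; values shown for concreteness only. */
    enum {
        CMD_PIO_READ = 0x20,          CMD_PIO_WRITE = 0x30,
        CMD_PIO_READ_EXT = 0x24,      CMD_PIO_WRITE_EXT = 0x34,
        CMD_DMA_READ = 0xC8,          CMD_DMA_WRITE = 0xCA,
        CMD_DMA_READ_EXT = 0x25,      CMD_DMA_WRITE_EXT = 0x35,
        CMD_DMA_WRITE_FUA_EXT = 0x3D, CMD_INVALID = 0,
    };

    /* One eight-entry row per protocol; index = fua(4) + lba48(2) + write(1). */
    static const uint8_t pio_cmds[8] = {
        CMD_PIO_READ, CMD_PIO_WRITE, CMD_PIO_READ_EXT, CMD_PIO_WRITE_EXT,
        CMD_INVALID,  CMD_INVALID,   CMD_INVALID,      CMD_INVALID,
    };
    static const uint8_t dma_cmds[8] = {
        CMD_DMA_READ, CMD_DMA_WRITE, CMD_DMA_READ_EXT, CMD_DMA_WRITE_EXT,
        CMD_INVALID,  CMD_INVALID,   CMD_INVALID,      CMD_DMA_WRITE_FUA_EXT,
    };

    /* Pick a read/write command using the same 4/2/1 weighting. */
    static uint8_t pick_rw_cmd(int use_dma, int lba48, int write, int fua)
    {
        int idx = (fua ? 4 : 0) + (lba48 ? 2 : 0) + (write ? 1 : 0);

        return use_dma ? dma_cmds[idx] : pio_cmds[idx];
    }

    int main(void)
    {
        /* DMA + LBA48 + write + FUA lands on index 7, the FUA variant. */
        printf("0x%02X\n", pick_rw_cmd(1, 1, 1, 1));
        return 0;
    }
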
619 u64 ata_tf_read_block(const struct ata_taskfile *tf, struct ata_device *dev) in ata_tf_read_block() argument
623 if (tf->flags & ATA_TFLAG_LBA) { in ata_tf_read_block()
624 if (tf->flags & ATA_TFLAG_LBA48) { in ata_tf_read_block()
625 block |= (u64)tf->hob_lbah << 40; in ata_tf_read_block()
626 block |= (u64)tf->hob_lbam << 32; in ata_tf_read_block()
627 block |= (u64)tf->hob_lbal << 24; in ata_tf_read_block()
629 block |= (tf->device & 0xf) << 24; in ata_tf_read_block()
631 block |= tf->lbah << 16; in ata_tf_read_block()
632 block |= tf->lbam << 8; in ata_tf_read_block()
633 block |= tf->lbal; in ata_tf_read_block()
637 cyl = tf->lbam | (tf->lbah << 8); in ata_tf_read_block()
638 head = tf->device & 0xf; in ata_tf_read_block()
639 sect = tf->lbal; in ata_tf_read_block()
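
For the CHS branch (lines 637-639), the cylinder, head and sector values still have to be folded into a linear block number; the conventional formula, which the surrounding source computes a few lines after those shown, is (cyl * heads + head) * sectors_per_track + (sect - 1), with sector numbering starting at 1. A small standalone sketch, assuming a hypothetical chs_geom struct:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical drive geometry used only for this example. */
    struct chs_geom {
        uint32_t heads;       /* heads per cylinder */
        uint32_t sectors;     /* sectors per track (numbered from 1) */
    };

    /* Classic CHS to LBA conversion; sector 0 does not exist in CHS,
     * so it is treated as invalid here. */
    static int64_t chs_to_lba(const struct chs_geom *g,
                              uint32_t cyl, uint32_t head, uint32_t sect)
    {
        if (sect == 0)
            return -1;
        return ((int64_t)cyl * g->heads + head) * g->sectors + sect - 1;
    }

    int main(void)
    {
        struct chs_geom g = { .heads = 16, .sectors = 63 };

        /* Cylinder 2, head 3, sector 4 -> LBA 2208 on a 16/63 geometry. */
        printf("%lld\n", (long long)chs_to_lba(&g, 2, 3, 4));
        return 0;
    }
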
674 int ata_build_rw_tf(struct ata_taskfile *tf, struct ata_device *dev, in ata_build_rw_tf() argument
678 tf->flags |= ATA_TFLAG_ISADDR | ATA_TFLAG_DEVICE; in ata_build_rw_tf()
679 tf->flags |= tf_flags; in ata_build_rw_tf()
686 tf->protocol = ATA_PROT_NCQ; in ata_build_rw_tf()
687 tf->flags |= ATA_TFLAG_LBA | ATA_TFLAG_LBA48; in ata_build_rw_tf()
689 if (tf->flags & ATA_TFLAG_WRITE) in ata_build_rw_tf()
690 tf->command = ATA_CMD_FPDMA_WRITE; in ata_build_rw_tf()
692 tf->command = ATA_CMD_FPDMA_READ; in ata_build_rw_tf()
694 tf->nsect = tag << 3; in ata_build_rw_tf()
695 tf->hob_feature = (n_block >> 8) & 0xff; in ata_build_rw_tf()
696 tf->feature = n_block & 0xff; in ata_build_rw_tf()
698 tf->hob_lbah = (block >> 40) & 0xff; in ata_build_rw_tf()
699 tf->hob_lbam = (block >> 32) & 0xff; in ata_build_rw_tf()
700 tf->hob_lbal = (block >> 24) & 0xff; in ata_build_rw_tf()
701 tf->lbah = (block >> 16) & 0xff; in ata_build_rw_tf()
702 tf->lbam = (block >> 8) & 0xff; in ata_build_rw_tf()
703 tf->lbal = block & 0xff; in ata_build_rw_tf()
705 tf->device = ATA_LBA; in ata_build_rw_tf()
706 if (tf->flags & ATA_TFLAG_FUA) in ata_build_rw_tf()
707 tf->device |= 1 << 7; in ata_build_rw_tf()
711 tf->hob_nsect |= ATA_PRIO_HIGH << in ata_build_rw_tf()
715 tf->flags |= ATA_TFLAG_LBA; in ata_build_rw_tf()
719 tf->device |= (block >> 24) & 0xf; in ata_build_rw_tf()
725 tf->flags |= ATA_TFLAG_LBA48; in ata_build_rw_tf()
727 tf->hob_nsect = (n_block >> 8) & 0xff; in ata_build_rw_tf()
729 tf->hob_lbah = (block >> 40) & 0xff; in ata_build_rw_tf()
730 tf->hob_lbam = (block >> 32) & 0xff; in ata_build_rw_tf()
731 tf->hob_lbal = (block >> 24) & 0xff; in ata_build_rw_tf()
736 if (unlikely(ata_rwcmd_protocol(tf, dev) < 0)) in ata_build_rw_tf()
739 tf->nsect = n_block & 0xff; in ata_build_rw_tf()
741 tf->lbah = (block >> 16) & 0xff; in ata_build_rw_tf()
742 tf->lbam = (block >> 8) & 0xff; in ata_build_rw_tf()
743 tf->lbal = block & 0xff; in ata_build_rw_tf()
745 tf->device |= ATA_LBA; in ata_build_rw_tf()
754 if (unlikely(ata_rwcmd_protocol(tf, dev) < 0)) in ata_build_rw_tf()
773 tf->nsect = n_block & 0xff; /* Sector count 0 means 256 sectors */ in ata_build_rw_tf()
774 tf->lbal = sect; in ata_build_rw_tf()
775 tf->lbam = cyl; in ata_build_rw_tf()
776 tf->lbah = cyl >> 8; in ata_build_rw_tf()
777 tf->device |= head; in ata_build_rw_tf()
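
Two register layouts are visible in ata_build_rw_tf(): the NCQ path (lines 694-696) stores the sector count in feature/hob_feature and the queue tag in bits 3-7 of nsect, while the plain LBA48 path (lines 727-743) keeps the count in nsect/hob_nsect and splits the block address across the six LBA byte registers. A standalone sketch of both layouts, using a simplified hypothetical tf_regs struct rather than the kernel's struct ata_taskfile:

    #include <stdint.h>
    #include <stdio.h>

    /* Simplified stand-in for the taskfile byte registers. */
    struct tf_regs {
        uint8_t feature, hob_feature;
        uint8_t nsect, hob_nsect;
        uint8_t lbal, lbam, lbah;
        uint8_t hob_lbal, hob_lbam, hob_lbah;
        uint8_t device;
    };

    /* Split a 48-bit block address across the six LBA byte registers
     * and set the LBA addressing bit (bit 6) in the device register. */
    static void set_lba48(struct tf_regs *tf, uint64_t block)
    {
        tf->hob_lbah = (block >> 40) & 0xff;
        tf->hob_lbam = (block >> 32) & 0xff;
        tf->hob_lbal = (block >> 24) & 0xff;
        tf->lbah     = (block >> 16) & 0xff;
        tf->lbam     = (block >> 8)  & 0xff;
        tf->lbal     =  block        & 0xff;
        tf->device   = 1 << 6;
    }

    /* Plain LBA48: 16-bit sector count in nsect/hob_nsect. */
    static void set_count_lba48(struct tf_regs *tf, uint32_t n_block)
    {
        tf->nsect     = n_block & 0xff;
        tf->hob_nsect = (n_block >> 8) & 0xff;
    }

    /* NCQ: the count moves to feature/hob_feature and the queue tag
     * occupies bits 3-7 of nsect. */
    static void set_count_ncq(struct tf_regs *tf, uint32_t n_block, unsigned tag)
    {
        tf->feature     = n_block & 0xff;
        tf->hob_feature = (n_block >> 8) & 0xff;
        tf->nsect       = (tag & 0x1f) << 3;
    }

    int main(void)
    {
        struct tf_regs ncq = { 0 }, plain = { 0 };

        set_lba48(&ncq, 0x0123456789ABULL);
        set_count_ncq(&ncq, 256, 5);

        set_lba48(&plain, 0x0123456789ABULL);
        set_count_lba48(&plain, 256);

        printf("ncq:   nsect %02x feature %02x hob_feature %02x\n",
               ncq.nsect, ncq.feature, ncq.hob_feature);
        printf("plain: nsect %02x hob_nsect %02x\n",
               plain.nsect, plain.hob_nsect);
        return 0;
    }
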
986 unsigned int ata_dev_classify(const struct ata_taskfile *tf) in ata_dev_classify() argument
1009 if ((tf->lbam == 0) && (tf->lbah == 0)) { in ata_dev_classify()
1014 if ((tf->lbam == 0x14) && (tf->lbah == 0xeb)) { in ata_dev_classify()
1019 if ((tf->lbam == 0x69) && (tf->lbah == 0x96)) { in ata_dev_classify()
1024 if ((tf->lbam == 0x3c) && (tf->lbah == 0xc3)) { in ata_dev_classify()
1029 if ((tf->lbam == 0xcd) && (tf->lbah == 0xab)) { in ata_dev_classify()
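
The lbam/lbah pairs tested on lines 1009-1029 are the post-reset device signature; each pair identifies a device class (plain ATA, ATAPI packet device, port multiplier, SEMB, and ZAC host-managed zoned, respectively). A compact standalone decoder, using illustrative class names rather than libata's own device-class constants:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative class names; libata defines its own constants. */
    enum dev_class { DEV_ATA, DEV_ATAPI, DEV_PMP, DEV_SEMB, DEV_ZAC, DEV_UNKNOWN };

    /* Decode the post-reset signature from the LBA mid/high bytes. */
    static enum dev_class classify_signature(uint8_t lbam, uint8_t lbah)
    {
        if (lbam == 0x00 && lbah == 0x00)
            return DEV_ATA;          /* plain ATA device        */
        if (lbam == 0x14 && lbah == 0xeb)
            return DEV_ATAPI;        /* packet (ATAPI) device   */
        if (lbam == 0x69 && lbah == 0x96)
            return DEV_PMP;          /* port multiplier         */
        if (lbam == 0x3c && lbah == 0xc3)
            return DEV_SEMB;         /* enclosure management    */
        if (lbam == 0xcd && lbah == 0xab)
            return DEV_ZAC;          /* host-managed zoned      */
        return DEV_UNKNOWN;
    }

    int main(void)
    {
        printf("%d\n", classify_signature(0x14, 0xeb));  /* 1 = DEV_ATAPI */
        return 0;
    }
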
1121 u64 ata_tf_to_lba48(const struct ata_taskfile *tf) in ata_tf_to_lba48() argument
1125 sectors |= ((u64)(tf->hob_lbah & 0xff)) << 40; in ata_tf_to_lba48()
1126 sectors |= ((u64)(tf->hob_lbam & 0xff)) << 32; in ata_tf_to_lba48()
1127 sectors |= ((u64)(tf->hob_lbal & 0xff)) << 24; in ata_tf_to_lba48()
1128 sectors |= (tf->lbah & 0xff) << 16; in ata_tf_to_lba48()
1129 sectors |= (tf->lbam & 0xff) << 8; in ata_tf_to_lba48()
1130 sectors |= (tf->lbal & 0xff); in ata_tf_to_lba48()
1135 u64 ata_tf_to_lba(const struct ata_taskfile *tf) in ata_tf_to_lba() argument
1139 sectors |= (tf->device & 0x0f) << 24; in ata_tf_to_lba()
1140 sectors |= (tf->lbah & 0xff) << 16; in ata_tf_to_lba()
1141 sectors |= (tf->lbam & 0xff) << 8; in ata_tf_to_lba()
1142 sectors |= (tf->lbal & 0xff); in ata_tf_to_lba()
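
These two helpers invert the byte split performed in ata_build_rw_tf(): the 48-bit form merges the hob_* and low LBA bytes, and the 28-bit form takes bits 24-27 from the low nibble of the device register. A standalone sketch of both, again on a simplified hypothetical register struct:

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Simplified register struct, as in the earlier sketches. */
    struct tf_regs {
        uint8_t lbal, lbam, lbah;
        uint8_t hob_lbal, hob_lbam, hob_lbah;
        uint8_t device;
    };

    /* Reassemble a 48-bit LBA from the six byte registers. */
    static uint64_t tf_to_lba48(const struct tf_regs *tf)
    {
        return ((uint64_t)tf->hob_lbah << 40) |
               ((uint64_t)tf->hob_lbam << 32) |
               ((uint64_t)tf->hob_lbal << 24) |
               ((uint64_t)tf->lbah     << 16) |
               ((uint64_t)tf->lbam     << 8)  |
                (uint64_t)tf->lbal;
    }

    /* 28-bit form: bits 24-27 come from the device register's low nibble. */
    static uint32_t tf_to_lba28(const struct tf_regs *tf)
    {
        return ((uint32_t)(tf->device & 0x0f) << 24) |
               ((uint32_t)tf->lbah << 16) |
               ((uint32_t)tf->lbam << 8)  |
                (uint32_t)tf->lbal;
    }

    int main(void)
    {
        struct tf_regs tf = {
            .hob_lbah = 0x01, .hob_lbam = 0x23, .hob_lbal = 0x45,
            .lbah = 0x67, .lbam = 0x89, .lbal = 0xab, .device = 0x42,
        };

        assert(tf_to_lba48(&tf) == 0x0123456789abULL);
        printf("lba28 = 0x%08x\n", tf_to_lba28(&tf));   /* 0x026789ab */
        return 0;
    }
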
1162 struct ata_taskfile tf; in ata_read_native_max_address() local
1165 ata_tf_init(dev, &tf); in ata_read_native_max_address()
1168 tf.flags |= ATA_TFLAG_DEVICE | ATA_TFLAG_ISADDR; in ata_read_native_max_address()
1171 tf.command = ATA_CMD_READ_NATIVE_MAX_EXT; in ata_read_native_max_address()
1172 tf.flags |= ATA_TFLAG_LBA48; in ata_read_native_max_address()
1174 tf.command = ATA_CMD_READ_NATIVE_MAX; in ata_read_native_max_address()
1176 tf.protocol = ATA_PROT_NODATA; in ata_read_native_max_address()
1177 tf.device |= ATA_LBA; in ata_read_native_max_address()
1179 err_mask = ata_exec_internal(dev, &tf, NULL, DMA_NONE, NULL, 0, 0); in ata_read_native_max_address()
1184 if (err_mask == AC_ERR_DEV && (tf.feature & ATA_ABORTED)) in ata_read_native_max_address()
1190 *max_sectors = ata_tf_to_lba48(&tf) + 1; in ata_read_native_max_address()
1192 *max_sectors = ata_tf_to_lba(&tf) + 1; in ata_read_native_max_address()
1213 struct ata_taskfile tf; in ata_set_max_sectors() local
1218 ata_tf_init(dev, &tf); in ata_set_max_sectors()
1220 tf.flags |= ATA_TFLAG_DEVICE | ATA_TFLAG_ISADDR; in ata_set_max_sectors()
1223 tf.command = ATA_CMD_SET_MAX_EXT; in ata_set_max_sectors()
1224 tf.flags |= ATA_TFLAG_LBA48; in ata_set_max_sectors()
1226 tf.hob_lbal = (new_sectors >> 24) & 0xff; in ata_set_max_sectors()
1227 tf.hob_lbam = (new_sectors >> 32) & 0xff; in ata_set_max_sectors()
1228 tf.hob_lbah = (new_sectors >> 40) & 0xff; in ata_set_max_sectors()
1230 tf.command = ATA_CMD_SET_MAX; in ata_set_max_sectors()
1232 tf.device |= (new_sectors >> 24) & 0xf; in ata_set_max_sectors()
1235 tf.protocol = ATA_PROT_NODATA; in ata_set_max_sectors()
1236 tf.device |= ATA_LBA; in ata_set_max_sectors()
1238 tf.lbal = (new_sectors >> 0) & 0xff; in ata_set_max_sectors()
1239 tf.lbam = (new_sectors >> 8) & 0xff; in ata_set_max_sectors()
1240 tf.lbah = (new_sectors >> 16) & 0xff; in ata_set_max_sectors()
1242 err_mask = ata_exec_internal(dev, &tf, NULL, DMA_NONE, NULL, 0, 0); in ata_set_max_sectors()
1248 (tf.feature & (ATA_ABORTED | ATA_IDNF))) in ata_set_max_sectors()
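
Both helpers speak in terms of the highest addressable LBA rather than a sector count, which is why lines 1190 and 1192 add 1 after reading the native max; comparing that native capacity with the currently reported capacity is how a Host Protected Area is detected. A tiny sketch of that arithmetic, with made-up example numbers:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* READ NATIVE MAX (EXT) reports the highest addressable LBA, so the
     * native capacity in sectors is that value plus one. */
    static uint64_t native_capacity(uint64_t native_max_lba)
    {
        return native_max_lba + 1;
    }

    /* A Host Protected Area is in effect when the currently reported
     * capacity is smaller than the native capacity. */
    static bool hpa_present(uint64_t current_sectors, uint64_t native_max_lba)
    {
        return native_capacity(native_max_lba) > current_sectors;
    }

    int main(void)
    {
        uint64_t current_sectors = 976771055;   /* made-up example values */
        uint64_t native_last_lba = 976773167;

        if (hpa_present(current_sectors, native_last_lba))
            printf("HPA hides %llu sectors\n",
                   (unsigned long long)(native_capacity(native_last_lba) -
                                        current_sectors));
        return 0;
    }
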
1492 struct ata_taskfile *tf, const u8 *cdb, in ata_exec_internal_sg() argument
1498 u8 command = tf->command; in ata_exec_internal_sg()
1538 qc->tf = *tf; in ata_exec_internal_sg()
1543 if (tf->protocol == ATAPI_PROT_DMA && (dev->flags & ATA_DFLAG_DMADIR) && in ata_exec_internal_sg()
1545 qc->tf.feature |= ATAPI_DMADIR; in ata_exec_internal_sg()
1624 } else if (qc->tf.command == ATA_CMD_REQ_SENSE_DATA) { in ata_exec_internal_sg()
1631 *tf = qc->result_tf; in ata_exec_internal_sg()
1668 struct ata_taskfile *tf, const u8 *cdb, in ata_exec_internal() argument
1682 return ata_exec_internal_sg(dev, tf, cdb, dma_dir, psg, n_elem, in ata_exec_internal()
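
Lines 1538 and 1631 show the contract of the internal-command path: the caller's taskfile is copied into the queued command on entry, and the device's result taskfile is copied back over the caller's copy on completion, which is why callers such as ata_read_native_max_address() can inspect tf.feature and the LBA bytes directly after ata_exec_internal() returns. A minimal sketch of that copy-in/copy-out pattern around a hypothetical executor callback:

    #include <stdint.h>
    #include <stdio.h>

    /* Simplified taskfile: only the fields this sketch needs.  On input
     * 'command' is the opcode; on output it stands in for the status
     * register and 'feature' for the error register. */
    struct tf_regs {
        uint8_t command;
        uint8_t feature;
        uint8_t nsect;
    };

    /* Hypothetical low-level executor that fills in a result taskfile. */
    typedef int (*tf_exec_fn)(const struct tf_regs *in, struct tf_regs *out);

    /* Same shape as the internal-command helpers: copy the caller's
     * taskfile in, run it, copy the result taskfile back out. */
    static int exec_internal(struct tf_regs *tf, tf_exec_fn exec)
    {
        struct tf_regs issued = *tf;
        struct tf_regs result = { 0 };
        int ret = exec(&issued, &result);

        *tf = result;            /* caller now sees the device's output */
        return ret;
    }

    /* Toy executor that "completes" the command successfully. */
    static int fake_exec(const struct tf_regs *in, struct tf_regs *out)
    {
        out->command = 0x50;     /* pretend status byte */
        out->feature = 0x00;     /* no error            */
        out->nsect   = in->nsect;
        return 0;
    }

    int main(void)
    {
        struct tf_regs tf = { .command = 0xec, .nsect = 1 };

        exec_internal(&tf, fake_exec);
        printf("status 0x%02x error 0x%02x\n", tf.command, tf.feature);
        return 0;
    }
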
1754 struct ata_taskfile *tf, u16 *id) in ata_do_dev_read_id() argument
1756 return ata_exec_internal(dev, tf, NULL, DMA_FROM_DEVICE, in ata_do_dev_read_id()
1787 struct ata_taskfile tf; in ata_dev_read_id() local
1798 ata_tf_init(dev, &tf); in ata_dev_read_id()
1806 tf.command = ATA_CMD_ID_ATA; in ata_dev_read_id()
1809 tf.command = ATA_CMD_ID_ATAPI; in ata_dev_read_id()
1817 tf.protocol = ATA_PROT_PIO; in ata_dev_read_id()
1822 tf.flags |= ATA_TFLAG_ISADDR | ATA_TFLAG_DEVICE; in ata_dev_read_id()
1827 tf.flags |= ATA_TFLAG_POLLING; in ata_dev_read_id()
1830 err_mask = ap->ops->read_id(dev, &tf, id); in ata_dev_read_id()
1832 err_mask = ata_do_dev_read_id(dev, &tf, id); in ata_dev_read_id()
1848 if ((err_mask == AC_ERR_DEV) && (tf.feature & ATA_ABORTED)) { in ata_dev_read_id()
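
ata_dev_read_id() issues IDENTIFY DEVICE for ATA devices and IDENTIFY PACKET DEVICE for ATAPI (lines 1806-1809), transferring the 256-word identify data by PIO. A small sketch of the opcode selection and buffer sizing; the 0xEC/0xA1 opcodes are the standard ATA values, and the enum and helper names are mine:

    #include <stdint.h>
    #include <stdio.h>

    /* IDENTIFY DEVICE and IDENTIFY PACKET DEVICE opcodes. */
    #define CMD_ID_ATA    0xec
    #define CMD_ID_ATAPI  0xa1

    /* IDENTIFY data is 256 little-endian 16-bit words (512 bytes). */
    #define ID_WORDS 256

    enum probe_class { PROBE_ATA, PROBE_ATAPI };

    /* Pick the identify opcode for the class being probed. */
    static uint8_t identify_command(enum probe_class cls)
    {
        return cls == PROBE_ATAPI ? CMD_ID_ATAPI : CMD_ID_ATA;
    }

    int main(void)
    {
        uint16_t id[ID_WORDS] = { 0 };

        printf("opcode 0x%02x, buffer %zu bytes\n",
               identify_command(PROBE_ATA), sizeof(id));
        return 0;
    }
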
1992 struct ata_taskfile tf; in ata_read_log_page() local
2006 ata_tf_init(dev, &tf); in ata_read_log_page()
2009 tf.command = ATA_CMD_READ_LOG_DMA_EXT; in ata_read_log_page()
2010 tf.protocol = ATA_PROT_DMA; in ata_read_log_page()
2013 tf.command = ATA_CMD_READ_LOG_EXT; in ata_read_log_page()
2014 tf.protocol = ATA_PROT_PIO; in ata_read_log_page()
2017 tf.lbal = log; in ata_read_log_page()
2018 tf.lbam = page; in ata_read_log_page()
2019 tf.nsect = sectors; in ata_read_log_page()
2020 tf.hob_nsect = sectors >> 8; in ata_read_log_page()
2021 tf.flags |= ATA_TFLAG_ISADDR | ATA_TFLAG_LBA48 | ATA_TFLAG_DEVICE; in ata_read_log_page()
2023 err_mask = ata_exec_internal(dev, &tf, NULL, DMA_FROM_DEVICE, in ata_read_log_page()
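
ata_read_log_page() addresses a general purpose log by putting the log number in lbal and the page in lbam, with the 512-byte sector count split across nsect/hob_nsect, and it prefers READ LOG DMA EXT when the device supports it (lines 2009-2021). A standalone sketch of that setup, using a simplified struct and a plain flag in place of the protocol field; the 0x2F/0x47 opcodes and the 0x30 (IDENTIFY DEVICE data) log number reflect the ATA command set:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define CMD_READ_LOG_EXT      0x2f
    #define CMD_READ_LOG_DMA_EXT  0x47

    /* Simplified stand-in for the fields used by the log read. */
    struct log_tf {
        uint8_t command;
        bool    dma;              /* stands in for the protocol field */
        uint8_t lbal, lbam;
        uint8_t nsect, hob_nsect;
    };

    /* Address a general purpose log: log id in LBA low, page in LBA mid,
     * 512-byte sector count split across nsect/hob_nsect.  Use the DMA
     * variant only if the device advertises it. */
    static void build_read_log(struct log_tf *tf, bool dev_has_dma_log,
                               uint8_t log, uint8_t page, uint16_t sectors)
    {
        tf->command   = dev_has_dma_log ? CMD_READ_LOG_DMA_EXT : CMD_READ_LOG_EXT;
        tf->dma       = dev_has_dma_log;
        tf->lbal      = log;
        tf->lbam      = page;
        tf->nsect     = sectors & 0xff;
        tf->hob_nsect = sectors >> 8;
    }

    int main(void)
    {
        struct log_tf tf;

        build_read_log(&tf, true, 0x30, 0, 1);  /* IDENTIFY DEVICE data log */
        printf("cmd 0x%02x log 0x%02x page %d count %d\n",
               tf.command, tf.lbal, tf.lbam, tf.nsect | (tf.hob_nsect << 8));
        return 0;
    }
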
4242 struct ata_taskfile tf; in ata_dev_set_xfermode() local
4251 ata_tf_init(dev, &tf); in ata_dev_set_xfermode()
4252 tf.command = ATA_CMD_SET_FEATURES; in ata_dev_set_xfermode()
4253 tf.feature = SETFEATURES_XFER; in ata_dev_set_xfermode()
4254 tf.flags |= ATA_TFLAG_ISADDR | ATA_TFLAG_DEVICE | ATA_TFLAG_POLLING; in ata_dev_set_xfermode()
4255 tf.protocol = ATA_PROT_NODATA; in ata_dev_set_xfermode()
4258 tf.nsect = dev->xfer_mode; in ata_dev_set_xfermode()
4261 tf.nsect = 0x01; in ata_dev_set_xfermode()
4266 err_mask = ata_exec_internal(dev, &tf, NULL, DMA_NONE, NULL, 0, 15000); in ata_dev_set_xfermode()
4289 struct ata_taskfile tf; in ata_dev_set_feature() local
4296 ata_tf_init(dev, &tf); in ata_dev_set_feature()
4297 tf.command = ATA_CMD_SET_FEATURES; in ata_dev_set_feature()
4298 tf.feature = enable; in ata_dev_set_feature()
4299 tf.flags |= ATA_TFLAG_ISADDR | ATA_TFLAG_DEVICE; in ata_dev_set_feature()
4300 tf.protocol = ATA_PROT_NODATA; in ata_dev_set_feature()
4301 tf.nsect = feature; in ata_dev_set_feature()
4306 err_mask = ata_exec_internal(dev, &tf, NULL, DMA_NONE, NULL, 0, timeout); in ata_dev_set_feature()
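
Both ata_dev_set_xfermode() and ata_dev_set_feature() are thin wrappers around SET FEATURES: the subcommand goes in the feature register and its argument in the sector count register (lines 4253-4261 and 4298-4301). A minimal standalone sketch; the 0xEF opcode and the 0x03 "set transfer mode" subcommand are the standard values, while the struct and helper names are illustrative:

    #include <stdint.h>
    #include <stdio.h>

    /* SET FEATURES opcode and the "set transfer mode" subcommand. */
    #define CMD_SET_FEATURES  0xef
    #define SETFEATURES_XFER  0x03

    /* Non-data command: subcommand in the feature register, its
     * argument in the sector count register. */
    struct feat_tf {
        uint8_t command;
        uint8_t feature;
        uint8_t nsect;
    };

    static void build_set_features(struct feat_tf *tf, uint8_t subcmd, uint8_t arg)
    {
        tf->command = CMD_SET_FEATURES;
        tf->feature = subcmd;
        tf->nsect   = arg;
    }

    int main(void)
    {
        struct feat_tf tf;

        /* Select UDMA mode 5: transfer mode value 0x40 | 5 = 0x45. */
        build_set_features(&tf, SETFEATURES_XFER, 0x45);
        printf("cmd 0x%02x sub 0x%02x arg 0x%02x\n",
               tf.command, tf.feature, tf.nsect);
        return 0;
    }
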
4328 struct ata_taskfile tf; in ata_dev_init_params() local
4338 ata_tf_init(dev, &tf); in ata_dev_init_params()
4339 tf.command = ATA_CMD_INIT_DEV_PARAMS; in ata_dev_init_params()
4340 tf.flags |= ATA_TFLAG_ISADDR | ATA_TFLAG_DEVICE; in ata_dev_init_params()
4341 tf.protocol = ATA_PROT_NODATA; in ata_dev_init_params()
4342 tf.nsect = sectors; in ata_dev_init_params()
4343 tf.device |= (heads - 1) & 0x0f; /* max head = num. of heads - 1 */ in ata_dev_init_params()
4345 err_mask = ata_exec_internal(dev, &tf, NULL, DMA_NONE, NULL, 0, 0); in ata_dev_init_params()
4349 if (err_mask == AC_ERR_DEV && (tf.feature & ATA_ABORTED)) in ata_dev_init_params()
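
INITIALIZE DEVICE PARAMETERS programs a legacy CHS translation: sectors per track in the count register and the maximum head number (heads minus one) in the low nibble of the device register, exactly as lines 4342-4343 show. A tiny standalone sketch of that encoding:

    #include <stdint.h>
    #include <stdio.h>

    /* Program a CHS translation: sectors per track in the count register,
     * maximum head number (heads - 1) in the device register's low nibble. */
    static void build_init_dev_params(uint8_t *nsect, uint8_t *device,
                                      uint16_t heads, uint16_t sectors)
    {
        *nsect  = (uint8_t)sectors;
        *device = (*device & ~0x0f) | ((heads - 1) & 0x0f);
    }

    int main(void)
    {
        uint8_t nsect = 0, device = 0xa0;   /* 0xa0: legacy bits often set */

        build_init_dev_params(&nsect, &device, 16, 63);
        printf("nsect %d, device 0x%02x\n", nsect, device);
        return 0;
    }
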
4406 if (ata_is_ncq(qc->tf.protocol)) { in ata_std_qc_defer()
4611 if (ata_is_ncq(qc->tf.protocol)) { in __ata_qc_complete()
4640 qc->result_tf.flags = qc->tf.flags; in fill_result_tf()
4648 if (!ata_is_data(qc->tf.protocol)) in ata_verify_xfer()
4651 if ((dev->mwdma_mask || dev->udma_mask) && ata_is_pio(qc->tf.protocol)) in ata_verify_xfer()
4677 ledtrig_disk_activity(!!(qc->tf.flags & ATA_TFLAG_WRITE)); in ata_qc_complete()
4731 switch (qc->tf.command) { in ata_qc_complete()
4733 if (qc->tf.feature != SETFEATURES_WC_ON && in ata_qc_complete()
4734 qc->tf.feature != SETFEATURES_WC_OFF && in ata_qc_complete()
4735 qc->tf.feature != SETFEATURES_RA_ON && in ata_qc_complete()
4736 qc->tf.feature != SETFEATURES_RA_OFF) in ata_qc_complete()
4808 u8 prot = qc->tf.protocol; in ata_qc_issue()