| /OK3568_Linux_fs/u-boot/drivers/ddr/marvell/axp/ |
| ddr3_pbs.c |
|    97: u32 pup, dq, pups, cur_max_pup, valid_pup, reg;  (in ddr3_pbs_tx(), local) |
|   126: for (dq = 0; dq < DQ_NUM; dq++)  (in ddr3_pbs_tx()) |
|   127: skew_sum_array[pup][dq] = 0;  (in ddr3_pbs_tx()) |
|   174: for (dq = 0; dq < DQ_NUM; dq++) {  (in ddr3_pbs_tx()) |
|   177: (max_pup - 1)][dq] =  (in ddr3_pbs_tx()) |
|   201: for (dq = 0; dq < DQ_NUM; dq++) {  (in ddr3_pbs_tx()) |
|   207: [dq], CS0, (1 - ecc) *  (in ddr3_pbs_tx()) |
|   254: for (dq = 0; dq < DQ_NUM; dq++) {  (in ddr3_pbs_tx()) |
|   263: DEBUG_PBS_D(dq, 1);  (in ddr3_pbs_tx()) |
|   267: dq], 2);  (in ddr3_pbs_tx()) |
|   [all …] |
|
| ddr3_dqs.c |
|   313: u32 dq;  (in ddr3_find_adll_limits(), local) |
|   347: for (dq = 0; dq < DQ_NUM; dq++) {  (in ddr3_find_adll_limits()) |
|   348: analog_pbs_sum[pup][dq][0] = adll_start_val;  (in ddr3_find_adll_limits()) |
|   349: analog_pbs_sum[pup][dq][1] = adll_end_val;  (in ddr3_find_adll_limits()) |
|   379: for (dq = 0; dq < DQ_NUM; dq++) {  (in ddr3_find_adll_limits()) |
|   380: analog_pbs[victim_dq][pup][dq][0] =  (in ddr3_find_adll_limits()) |
|   382: analog_pbs[victim_dq][pup][dq][1] =  (in ddr3_find_adll_limits()) |
|   384: per_bit_data[pup][dq] = 0;  (in ddr3_find_adll_limits()) |
|   463: for (dq = 0; dq < DQ_NUM; dq++) {  (in ddr3_find_adll_limits()) |
|   464: if ((analog_pbs[victim_dq][pup][dq][0] != adll_start_val)  (in ddr3_find_adll_limits()) |
|   [all …] |
|
| ddr3_sdram.c |
|    99: __maybe_unused u32 dq;  (in compare_pattern_v1(), local) |
|   115: for (dq = 0; dq < DQ_NUM; dq++) {  (in compare_pattern_v1()) |
|   118: if (((var1 >> dq) & 0x1) !=  (in compare_pattern_v1()) |
|   119: ((var2 >> dq) & 0x1))  (in compare_pattern_v1()) |
|   120: per_bit_data[val][dq] = 1;  (in compare_pattern_v1()) |
|   122: per_bit_data[val][dq] = 0;  (in compare_pattern_v1()) |
|   172: __maybe_unused u32 dq;  (in ddr3_sdram_compare(), local) |
|   292: u32 ui, dq, pup;  (in ddr3_sdram_pbs_compare(), local) |
|   364: for (dq = 0; dq < DQ_NUM; dq++) {  (in ddr3_sdram_pbs_compare()) |
|   367: if (((var1 >> dq) & 0x1) !=  (in ddr3_sdram_pbs_compare()) |
|   [all …] |
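
The compare_pattern_v1() hits above show the per-DQ-bit comparison used during Marvell AXP DDR3 training: each bit of the read-back word is checked against the expected pattern and a per-bit error flag is recorded. A minimal standalone sketch of that pattern; the array name and the DQ_NUM value of 8 are illustrative, not the driver's actual layout:

```c
#include <stdint.h>

#define DQ_NUM 8   /* DQ bits per byte lane; illustrative value */

/* Record which DQ bits differ between the expected and read-back word,
 * mirroring the per_bit_data bookkeeping in compare_pattern_v1(). */
static void compare_per_dq_bit(uint32_t expected, uint32_t actual,
                               uint8_t per_bit_errors[DQ_NUM])
{
    for (uint32_t dq = 0; dq < DQ_NUM; dq++)
        per_bit_errors[dq] = ((expected >> dq) & 0x1) != ((actual >> dq) & 0x1);
}
```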
|
| /OK3568_Linux_fs/kernel/include/soc/fsl/ |
| dpaa2-global.h |
|    20: struct dq {  (struct) |
|    33: } dq;  (member) |
|    68: static inline u32 dpaa2_dq_flags(const struct dpaa2_dq *dq)  (in dpaa2_dq_flags(), argument) |
|    70: return dq->dq.stat;  (in dpaa2_dq_flags()) |
|    80: static inline int dpaa2_dq_is_pull(const struct dpaa2_dq *dq)  (in dpaa2_dq_is_pull(), argument) |
|    82: return (int)(dpaa2_dq_flags(dq) & DPAA2_DQ_STAT_VOLATILE);  (in dpaa2_dq_is_pull()) |
|    91: static inline bool dpaa2_dq_is_pull_complete(const struct dpaa2_dq *dq)  (in dpaa2_dq_is_pull_complete(), argument) |
|    93: return !!(dpaa2_dq_flags(dq) & DPAA2_DQ_STAT_EXPIRED);  (in dpaa2_dq_is_pull_complete()) |
|   104: static inline u16 dpaa2_dq_seqnum(const struct dpaa2_dq *dq)  (in dpaa2_dq_seqnum(), argument) |
|   106: return le16_to_cpu(dq->dq.seqnum);  (in dpaa2_dq_seqnum()) |
|   [all …] |
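
These are the accessors a DPIO consumer applies to a dequeue result: dpaa2_dq_flags() reads the status byte, dpaa2_dq_is_pull() and dpaa2_dq_is_pull_complete() test the VOLATILE and EXPIRED status bits, and dpaa2_dq_seqnum() extracts the sequence number. A rough sketch of how a pull (volatile) dequeue might be drained with them, assuming a hypothetical next_dq_result() helper that walks the result store (it is not part of the header):

```c
#include <linux/printk.h>
#include <soc/fsl/dpaa2-global.h>

/* Hypothetical helper: yields the next dequeue result from a store, or NULL. */
extern const struct dpaa2_dq *next_dq_result(void);

static void drain_pull_store(void)
{
    const struct dpaa2_dq *dq;

    while ((dq = next_dq_result()) != NULL) {
        /* For a volatile (pull) dequeue, the EXPIRED bit marks the end of
         * the responses generated for the pull command. */
        if (dpaa2_dq_is_pull(dq) && dpaa2_dq_is_pull_complete(dq))
            break;

        pr_debug("dq: seqnum %u, flags 0x%x\n",
                 dpaa2_dq_seqnum(dq), dpaa2_dq_flags(dq));
    }
}
```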
|
| /OK3568_Linux_fs/kernel/lib/raid6/ |
| recov_avx2.c |
|    19: u8 *p, *q, *dp, *dq;  (in raid6_2data_recov_avx2(), local) |
|    33: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_avx2()) |
|    35: ptrs[disks-1] = dq;  (in raid6_2data_recov_avx2()) |
|    41: ptrs[failb] = dq;  (in raid6_2data_recov_avx2()) |
|    61: asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (dq[0]));  (in raid6_2data_recov_avx2()) |
|    62: asm volatile("vpxor %0, %%ymm9, %%ymm9" : : "m" (dq[32]));  (in raid6_2data_recov_avx2()) |
|   120: asm volatile("vmovdqa %%ymm1, %0" : "=m" (dq[0]));  (in raid6_2data_recov_avx2()) |
|   121: asm volatile("vmovdqa %%ymm13,%0" : "=m" (dq[32]));  (in raid6_2data_recov_avx2()) |
|   132: dq += 64;  (in raid6_2data_recov_avx2()) |
|   136: asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (*dq));  (in raid6_2data_recov_avx2()) |
|   [all …] |
|
| recov_avx512.c |
|    27: u8 *p, *q, *dp, *dq;  (in raid6_2data_recov_avx512(), local) |
|    44: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_avx512()) |
|    46: ptrs[disks-1] = dq;  (in raid6_2data_recov_avx512()) |
|    52: ptrs[failb] = dq;  (in raid6_2data_recov_avx512()) |
|    78: "m" (p[64]), "m" (dq[0]), "m" (dq[64]),  (in raid6_2data_recov_avx512()) |
|   148: : "m" (dq[0]), "m" (dq[64]));  (in raid6_2data_recov_avx512()) |
|   159: dq += 128;  (in raid6_2data_recov_avx512()) |
|   166: : "m" (*q), "m" (*p), "m"(*dq), "m" (*dp));  (in raid6_2data_recov_avx512()) |
|   209: : "m" (dq[0]));  (in raid6_2data_recov_avx512()) |
|   220: dq += 64;  (in raid6_2data_recov_avx512()) |
|   [all …] |
|
| recov_ssse3.c |
|    19: u8 *p, *q, *dp, *dq;  (in raid6_2data_recov_ssse3(), local) |
|    35: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_ssse3()) |
|    37: ptrs[disks-1] = dq;  (in raid6_2data_recov_ssse3()) |
|    43: ptrs[failb] = dq;  (in raid6_2data_recov_ssse3()) |
|    71: asm volatile("pxor %0,%%xmm1" : : "m" (dq[0]));  (in raid6_2data_recov_ssse3()) |
|    72: asm volatile("pxor %0,%%xmm9" : : "m" (dq[16]));  (in raid6_2data_recov_ssse3()) |
|   124: asm volatile("movdqa %%xmm1,%0" : "=m" (dq[0]));  (in raid6_2data_recov_ssse3()) |
|   125: asm volatile("movdqa %%xmm9,%0" : "=m" (dq[16]));  (in raid6_2data_recov_ssse3()) |
|   136: dq += 32;  (in raid6_2data_recov_ssse3()) |
|   140: asm volatile("pxor %0,%%xmm1" : : "m" (*dq));  (in raid6_2data_recov_ssse3()) |
|   [all …] |
|
| recov_s390xc.c |
|    26: u8 *p, *q, *dp, *dq;  (in raid6_2data_recov_s390xc(), local) |
|    40: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_s390xc()) |
|    42: ptrs[disks-1] = dq;  (in raid6_2data_recov_s390xc()) |
|    48: ptrs[failb] = dq;  (in raid6_2data_recov_s390xc()) |
|    59: xor_block(dq, q);  (in raid6_2data_recov_s390xc()) |
|    61: dq[i] = pbmul[dp[i]] ^ qmul[dq[i]];  (in raid6_2data_recov_s390xc()) |
|    62: xor_block(dp, dq);  (in raid6_2data_recov_s390xc()) |
|    66: dq += 256;  (in raid6_2data_recov_s390xc()) |
|    75: u8 *p, *q, *dq;  (in raid6_datap_recov_s390xc(), local) |
|    84: dq = (u8 *)ptrs[faila];  (in raid6_datap_recov_s390xc()) |
|   [all …] |
|
| recov.c |
|    23: u8 *p, *q, *dp, *dq;  (in raid6_2data_recov_intx1(), local) |
|    37: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_intx1()) |
|    39: ptrs[disks-1] = dq;  (in raid6_2data_recov_intx1()) |
|    45: ptrs[failb] = dq;  (in raid6_2data_recov_intx1()) |
|    56: qx = qmul[*q ^ *dq];  (in raid6_2data_recov_intx1()) |
|    57: *dq++ = db = pbmul[px] ^ qx; /* Reconstructed B */  (in raid6_2data_recov_intx1()) |
|    67: u8 *p, *q, *dq;  (in raid6_datap_recov_intx1(), local) |
|    75: dq = (u8 *)ptrs[faila];  (in raid6_datap_recov_intx1()) |
|    77: ptrs[disks-1] = dq;  (in raid6_datap_recov_intx1()) |
|    82: ptrs[faila] = dq;  (in raid6_datap_recov_intx1()) |
|   [all …] |
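
recov.c is the scalar reference for the SIMD variants listed above: it reuses the failed blocks' pages (dp, dq) to hold syndromes recomputed with those blocks zeroed, then rebuilds both blocks with two table-driven GF(2^8) multiplies per byte. The sketch below restates the underlying algebra without the kernel's precomputed raid6_gfmul/raid6_gfexp tables; gf_mul(), gf_inv() and raid6_recover_two() are illustrative names, not the kernel API:

```c
#include <stdint.h>

/* GF(2^8) multiply with reduction polynomial x^8+x^4+x^3+x^2+1 (0x11d),
 * the field used by the Linux RAID-6 code. */
static uint8_t gf_mul(uint8_t a, uint8_t b)
{
    uint8_t r = 0;
    while (b) {
        if (b & 1)
            r ^= a;
        a = (uint8_t)((a << 1) ^ ((a & 0x80) ? 0x1d : 0));
        b >>= 1;
    }
    return r;
}

static uint8_t gf_inv(uint8_t a)           /* a must be non-zero */
{
    uint8_t b = 1;
    while (gf_mul(a, b) != 1)
        b++;
    return b;
}

static uint8_t gf_exp2(unsigned int n)     /* g^n with generator g = 2 */
{
    uint8_t r = 1;
    while (n--)
        r = gf_mul(r, 2);
    return r;
}

/* Recover one byte of each failed data block at positions x and y (x != y).
 * p_delta = P ^ Pxy and q_delta = Q ^ Qxy, i.e. the stored syndromes XORed
 * with syndromes recomputed over the surviving disks, which is what recov.c
 * accumulates into dp[]/dq[] before its inner loop. */
static void raid6_recover_two(uint8_t p_delta, uint8_t q_delta,
                              unsigned int x, unsigned int y,
                              uint8_t *data_x, uint8_t *data_y)
{
    uint8_t gx = gf_exp2(x), gy = gf_exp2(y);
    uint8_t inv = gf_inv(gx ^ gy);

    /* D_y = (Q' + g^x * P') / (g^x + g^y), then D_x = P' + D_y */
    *data_y = gf_mul((uint8_t)(q_delta ^ gf_mul(gx, p_delta)), inv);
    *data_x = p_delta ^ *data_y;
}
```

This mirrors how the intx1 inner loop folds two constant GF multiplications into its pbmul and qmul lookup tables before XORing the results.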
|
| recov_neon.c |
|    23: uint8_t *dq, const uint8_t *pbmul, |
|    26: void __raid6_datap_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dq, |
|    32: u8 *p, *q, *dp, *dq;  (in raid6_2data_recov_neon(), local) |
|    47: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_neon()) |
|    49: ptrs[disks - 1] = dq;  (in raid6_2data_recov_neon()) |
|    55: ptrs[failb] = dq;  (in raid6_2data_recov_neon()) |
|    65: __raid6_2data_recov_neon(bytes, p, q, dp, dq, pbmul, qmul);  (in raid6_2data_recov_neon()) |
|    72: u8 *p, *q, *dq;  (in raid6_datap_recov_neon(), local) |
|    82: dq = (u8 *)ptrs[faila];  (in raid6_datap_recov_neon()) |
|    84: ptrs[disks - 1] = dq;  (in raid6_datap_recov_neon()) |
|   [all …] |
|
| recov_neon_inner.c |
|    28: uint8_t *dq, const uint8_t *pbmul,  (in __raid6_2data_recov_neon(), argument) |
|    53: vx = veorq_u8(vld1q_u8(q), vld1q_u8(dq));  (in __raid6_2data_recov_neon()) |
|    66: vst1q_u8(dq, db);  (in __raid6_2data_recov_neon()) |
|    73: dq += 16;  (in __raid6_2data_recov_neon()) |
|    77: void __raid6_datap_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dq,  (in __raid6_datap_recov_neon(), argument) |
|    94: vx = veorq_u8(vld1q_u8(q), vld1q_u8(dq));  (in __raid6_datap_recov_neon()) |
|   102: vst1q_u8(dq, vx);  (in __raid6_datap_recov_neon()) |
|   108: dq += 16;  (in __raid6_datap_recov_neon()) |
|
| /OK3568_Linux_fs/kernel/fs/xfs/scrub/ |
| quota.c |
|    75: struct xfs_dquot *dq,  (in xchk_quota_item(), argument) |
|    94: offset = dq->q_id / qi->qi_dqperchunk;  (in xchk_quota_item()) |
|    95: if (dq->q_id && dq->q_id <= sqi->last_id)  (in xchk_quota_item()) |
|    98: sqi->last_id = dq->q_id;  (in xchk_quota_item()) |
|   108: if (dq->q_blk.hardlimit > mp->m_sb.sb_dblocks)  (in xchk_quota_item()) |
|   110: if (dq->q_blk.softlimit > dq->q_blk.hardlimit)  (in xchk_quota_item()) |
|   113: if (dq->q_ino.hardlimit > M_IGEO(mp)->maxicount)  (in xchk_quota_item()) |
|   115: if (dq->q_ino.softlimit > dq->q_ino.hardlimit)  (in xchk_quota_item()) |
|   118: if (dq->q_rtb.hardlimit > mp->m_sb.sb_rblocks)  (in xchk_quota_item()) |
|   120: if (dq->q_rtb.softlimit > dq->q_rtb.hardlimit)  (in xchk_quota_item()) |
|   [all …] |
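
The scrub checks above apply the same two rules to each quota resource (blocks, inodes, realtime blocks): the hard limit must not exceed what the filesystem can hold, and the soft limit must not exceed the hard limit. A standalone restatement of that rule pair; the struct and function names are illustrative, not the XFS ones:

```c
#include <stdbool.h>
#include <stdint.h>

struct res_limits {
    uint64_t softlimit;
    uint64_t hardlimit;
};

/* Returns true when one resource's quota limits are internally consistent
 * and do not exceed the filesystem-wide capacity for that resource. */
static bool quota_limits_sane(const struct res_limits *lim, uint64_t fs_capacity)
{
    if (lim->hardlimit > fs_capacity)
        return false;              /* hard limit larger than the fs can hold */
    if (lim->softlimit > lim->hardlimit)
        return false;              /* soft limit must not exceed hard limit */
    return true;
}
```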
|
| /OK3568_Linux_fs/kernel/drivers/soc/fsl/dpio/ |
| qbman-portal.h |
|   212: void qbman_swp_dqrr_consume(struct qbman_swp *s, const struct dpaa2_dq *dq); |
|   214: int qbman_result_has_new_result(struct qbman_swp *p, const struct dpaa2_dq *dq); |
|   301: static inline int qbman_result_is_DQ(const struct dpaa2_dq *dq)  (in qbman_result_is_DQ(), argument) |
|   303: return ((dq->dq.verb & QBMAN_RESULT_MASK) == QBMAN_RESULT_DQ);  (in qbman_result_is_DQ()) |
|   311: static inline int qbman_result_is_SCN(const struct dpaa2_dq *dq)  (in qbman_result_is_SCN(), argument) |
|   313: return !qbman_result_is_DQ(dq);  (in qbman_result_is_SCN()) |
|   317: static inline int qbman_result_is_FQDAN(const struct dpaa2_dq *dq)  (in qbman_result_is_FQDAN(), argument) |
|   319: return ((dq->dq.verb & QBMAN_RESULT_MASK) == QBMAN_RESULT_FQDAN);  (in qbman_result_is_FQDAN()) |
|   323: static inline int qbman_result_is_CDAN(const struct dpaa2_dq *dq)  (in qbman_result_is_CDAN(), argument) |
|   325: return ((dq->dq.verb & QBMAN_RESULT_MASK) == QBMAN_RESULT_CDAN);  (in qbman_result_is_CDAN()) |
|   [all …] |
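
These predicates classify a portal result by its verb field: qbman_result_is_DQ() recognises a real frame dequeue, and everything else is treated as a state-change notification (FQDAN, CDAN, and so on). A rough dispatch sketch built only on the predicates above; handle_frame_dequeue(), handle_fqdan() and handle_cdan() are hypothetical consumer callbacks, and the driver-internal qbman-portal.h declarations are assumed to be in scope:

```c
/* Hypothetical consumer callbacks, not part of the dpio driver. */
extern void handle_frame_dequeue(const struct dpaa2_dq *dq);
extern void handle_fqdan(const struct dpaa2_dq *dq);
extern void handle_cdan(const struct dpaa2_dq *dq);

static void classify_result(const struct dpaa2_dq *dq)
{
    if (qbman_result_is_DQ(dq)) {
        handle_frame_dequeue(dq);      /* an actual frame dequeue result */
        return;
    }

    /* qbman_result_is_SCN(): anything that is not a frame dequeue */
    if (qbman_result_is_FQDAN(dq))
        handle_fqdan(dq);              /* frame-queue data availability */
    else if (qbman_result_is_CDAN(dq))
        handle_cdan(dq);               /* channel data availability */
}
```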
|
| /OK3568_Linux_fs/buildroot/dl/sox/git/src/ |
| g721.c |
|    87: short dq, i;  (in g721_encoder(), local) |
|   113: dq = reconstruct(i & 8, _dqlntab[i], y); /* quantized est diff */  (in g721_encoder()) |
|   115: sr = (dq < 0) ? se - (dq & 0x3FFF) : se + dq; /* reconst. signal */  (in g721_encoder()) |
|   119: update(4, y, _witab[i] << 5, _fitab[i], dq, sr, dqsez, state_ptr);  (in g721_encoder()) |
|   138: short dq;  (in g721_decoder(), local) |
|   149: dq = reconstruct(i & 0x08, _dqlntab[i], y); /* quantized diff. */  (in g721_decoder()) |
|   151: sr = (dq < 0) ? (se - (dq & 0x3FFF)) : se + dq; /* reconst. signal */  (in g721_decoder()) |
|   155: update(4, y, _witab[i] << 5, _fitab[i], dq, sr, dqsez, state_ptr);  (in g721_decoder()) |
|
| g723_24.c |
|    74: short dq, i;  (in g723_24_encoder(), local) |
|   100: dq = reconstruct(i & 4, _dqlntab[i], y); /* quantized diff. */  (in g723_24_encoder()) |
|   102: sr = (dq < 0) ? se - (dq & 0x3FFF) : se + dq; /* reconstructed signal */  (in g723_24_encoder()) |
|   106: update(3, y, _witab[i], _fitab[i], dq, sr, dqsez, state_ptr);  (in g723_24_encoder()) |
|   123: short dq;  (in g723_24_decoder(), local) |
|   133: dq = reconstruct(i & 0x04, _dqlntab[i], y); /* unquantize pred diff */  (in g723_24_decoder()) |
|   135: sr = (dq < 0) ? (se - (dq & 0x3FFF)) : (se + dq); /* reconst. signal */  (in g723_24_decoder()) |
|   139: update(3, y, _witab[i], _fitab[i], dq, sr, dqsez, state_ptr);  (in g723_24_decoder()) |
|
| g723_40.c |
|    93: short dq, i;  (in g723_40_encoder(), local) |
|   120: dq = reconstruct(i & 0x10, _dqlntab[i], y); /* quantized diff */  (in g723_40_encoder()) |
|   122: sr = (dq < 0) ? se - (dq & 0x7FFF) : se + dq; /* reconstructed signal */  (in g723_40_encoder()) |
|   126: update(5, y, _witab[i], _fitab[i], dq, sr, dqsez, state_ptr);  (in g723_40_encoder()) |
|   143: short dq;  (in g723_40_decoder(), local) |
|   153: dq = reconstruct(i & 0x10, _dqlntab[i], y); /* estimation diff. */  (in g723_40_decoder()) |
|   155: sr = (dq < 0) ? (se - (dq & 0x7FFF)) : (se + dq); /* reconst. signal */  (in g723_40_decoder()) |
|   159: update(5, y, _witab[i], _fitab[i], dq, sr, dqsez, state_ptr);  (in g723_40_decoder()) |
|
| g72x.c |
|   158: state_ptr->dq[cnta] = 32;  (in g72x_init_state()) |
|   174: sezi = fmult(state_ptr->b[0] >> 2, state_ptr->dq[0]);  (in predictor_zero()) |
|   176: sezi += fmult(state_ptr->b[i] >> 2, state_ptr->dq[i]);  (in predictor_zero()) |
|   276: short dq; /* Reconstructed difference signal sample */  (in reconstruct(), local) |
|   285: dq = (dqt << 7) >> (14 - dex);  (in reconstruct()) |
|   286: return ((sign) ? (dq - 0x8000) : dq);  (in reconstruct()) |
|   296: void update(int code_size, int y, int wi, int fi, int dq, int sr,  (in update(), argument) |
|   312: mag = dq & 0x7FFF; /* prediction difference magnitude */  (in update()) |
|   415: if (dq & 0x7FFF) { /* XOR */  (in update()) |
|   416: if ((dq ^ state_ptr->dq[cnt]) >= 0)  (in update()) |
|   [all …] |
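
Across these G.72x coders, dq is the quantized difference signal carried in a sign-magnitude encoding: reconstruct() returns the magnitude directly for positive values and (dq - 0x8000) for negative ones, which is why the encoders and decoders rebuild the signal as se - (dq & 0x3FFF) when dq is negative and se + dq otherwise (mask 0x7FFF in the 5-bit G.723_40 variant). A small illustrative helper restating that branch for the 14-bit-magnitude case; it is not part of g72x.c:

```c
#include <stdint.h>

/* Rebuild the signal estimate from the sign-magnitude difference sample,
 * mirroring `sr = (dq < 0) ? se - (dq & 0x3FFF) : se + dq;` in g721.c and
 * g723_24.c. For negative dq, the low 14 bits hold the magnitude. */
static int16_t reconstruct_signal(int16_t se, int16_t dq)
{
    if (dq < 0)
        return (int16_t)(se - (dq & 0x3FFF));   /* subtract the magnitude */
    return (int16_t)(se + dq);                  /* positive dq is the magnitude */
}
```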
|
| /OK3568_Linux_fs/u-boot/cmd/ddr_tool/ddr_dq_eye/ |
| ddr_dq_eye.c |
|    40: u8 dq;  (in calc_print_border(), local) |
|    53: for (dq = 0; dq < 8; dq++) {  (in calc_print_border()) |
|    54: if (result->dqs[dqs].dq_min[dq] < far_left)  (in calc_print_border()) |
|    55: far_left = result->dqs[dqs].dq_min[dq];  (in calc_print_border()) |
|    56: if (result->dqs[dqs].dq_max[dq] > far_right)  (in calc_print_border()) |
|    57: far_right = result->dqs[dqs].dq_max[dq];  (in calc_print_border()) |
|    99: u8 dq;  (in print_ddr_dq_eye(), local) |
|   107: for (dq = 0; dq < 8; dq++) {  (in print_ddr_dq_eye()) |
|   109: result->dqs[dqs].dq_deskew[dq];  (in print_ddr_dq_eye()) |
|   110: min = result->dqs[dqs].dq_min[dq];  (in print_ddr_dq_eye()) |
|   [all …] |
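
calc_print_border() scans every DQ bit of each byte lane for the smallest dq_min and largest dq_max, so the printed eye diagram spans all bits' passing windows. A minimal sketch of that scan over one lane; the struct layout here is illustrative, not the tool's real result type:

```c
#include <stdint.h>

/* Illustrative per-lane result layout; the real ddr_dq_eye structures differ. */
struct lane_eye {
    int dq_min[8];   /* left edge of the passing window, per DQ bit */
    int dq_max[8];   /* right edge of the passing window, per DQ bit */
};

/* Find the outermost edges across all 8 DQ bits of one byte lane, so a
 * printed eye diagram can be scaled to cover every bit's window. */
static void eye_border(const struct lane_eye *lane, int *far_left, int *far_right)
{
    *far_left = lane->dq_min[0];
    *far_right = lane->dq_max[0];
    for (int dq = 1; dq < 8; dq++) {
        if (lane->dq_min[dq] < *far_left)
            *far_left = lane->dq_min[dq];
        if (lane->dq_max[dq] > *far_right)
            *far_right = lane->dq_max[dq];
    }
}
```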
|
| /OK3568_Linux_fs/kernel/crypto/async_tx/ |
| async_raid6_recov.c |
|   208: struct page *p, *q, *g, *dp, *dq;  (in __2data_recov_5(), local) |
|   244: dq = blocks[failb];  (in __2data_recov_5()) |
|   250: tx = async_mult(dq, dq_off, g, g_off,  (in __2data_recov_5()) |
|   263: srcs[0] = dq;  (in __2data_recov_5()) |
|   269: tx = async_xor_offs(dq, dq_off, srcs, src_offs, 2, bytes, submit);  (in __2data_recov_5()) |
|   274: srcs[1] = dq;  (in __2data_recov_5()) |
|   279: tx = async_sum_product(dq, dq_off, srcs, src_offs, coef, bytes, submit);  (in __2data_recov_5()) |
|   284: srcs[1] = dq;  (in __2data_recov_5()) |
|   299: struct page *p, *q, *dp, *dq;  (in __2data_recov_n(), local) |
|   323: dq = blocks[failb];  (in __2data_recov_n()) |
|   [all …] |
|
| /OK3568_Linux_fs/u-boot/drivers/net/fsl-mc/dpio/ |
| qbman_portal.c |
|   375: const struct ldpaa_dq *dq;  (in qbman_swp_dqrr_next(), local) |
|   378: dq = qbman_cena_read(&s->sys, QBMAN_CENA_SWP_DQRR(s->dqrr.next_idx));  (in qbman_swp_dqrr_next()) |
|   379: p = qb_cl(dq);  (in qbman_swp_dqrr_next()) |
|   405: flags = ldpaa_dq_flags(dq);  (in qbman_swp_dqrr_next()) |
|   414: return dq;  (in qbman_swp_dqrr_next()) |
|   418: void qbman_swp_dqrr_consume(struct qbman_swp *s, const struct ldpaa_dq *dq)  (in qbman_swp_dqrr_consume(), argument) |
|   420: qbman_cinh_write(&s->sys, QBMAN_CINH_SWP_DCAP, QBMAN_IDX_FROM_DQRR(dq));  (in qbman_swp_dqrr_consume()) |
|   427: void qbman_dq_entry_set_oldtoken(struct ldpaa_dq *dq,  (in qbman_dq_entry_set_oldtoken(), argument) |
|   431: memset(dq, oldtoken, num_entries * sizeof(*dq));  (in qbman_dq_entry_set_oldtoken()) |
|   435: const struct ldpaa_dq *dq,  (in qbman_dq_entry_has_newtoken(), argument) |
|   [all …] |
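
In this U-Boot portal driver, qbman_swp_dqrr_next() reads the next DQRR ring entry and qbman_swp_dqrr_consume() writes its index to the DCAP register to hand the slot back to hardware. A rough sketch of the resulting poll/consume cycle; process_entry() is a hypothetical callback, the driver's declarations (struct qbman_swp, struct ldpaa_dq and the two DQRR helpers) are assumed to be in scope, and the NULL-on-empty behaviour is an assumption about qbman_swp_dqrr_next():

```c
/* Hypothetical consumer hook, not part of the dpio driver. */
extern void process_entry(const struct ldpaa_dq *dq);

static void poll_dqrr_once(struct qbman_swp *swp)
{
    const struct ldpaa_dq *dq;

    dq = qbman_swp_dqrr_next(swp);      /* assumed to return NULL when empty */
    if (!dq)
        return;

    process_entry(dq);                  /* handle the frame or notification */
    qbman_swp_dqrr_consume(swp, dq);    /* release the DQRR slot via DCAP */
}
```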
|
| /OK3568_Linux_fs/kernel/drivers/net/ethernet/cavium/liquidio/ |
| cn66xx_regs.h |
|   311: #define CN6XXX_DMA_CNT(dq) \  (argument) |
|   312: (CN6XXX_DMA_CNT_START + ((dq) * CN6XXX_DMA_OFFSET)) |
|   314: #define CN6XXX_DMA_INT_LEVEL(dq) \  (argument) |
|   315: (CN6XXX_DMA_INT_LEVEL_START + ((dq) * CN6XXX_DMA_OFFSET)) |
|   317: #define CN6XXX_DMA_PKT_INT_LEVEL(dq) \  (argument) |
|   318: (CN6XXX_DMA_INT_LEVEL_START + ((dq) * CN6XXX_DMA_OFFSET)) |
|   320: #define CN6XXX_DMA_TIME_INT_LEVEL(dq) \  (argument) |
|   321: (CN6XXX_DMA_INT_LEVEL_START + 4 + ((dq) * CN6XXX_DMA_OFFSET)) |
|   323: #define CN6XXX_DMA_TIM(dq) \  (argument) |
|   324: (CN6XXX_DMA_TIM_START + ((dq) * CN6XXX_DMA_OFFSET)) |
|
| cn23xx_pf_regs.h |
|   366: #define CN23XX_DMA_CNT(dq) \  (argument) |
|   367: (CN23XX_DMA_CNT_START + ((dq) * CN23XX_DMA_OFFSET)) |
|   369: #define CN23XX_DMA_INT_LEVEL(dq) \  (argument) |
|   370: (CN23XX_DMA_INT_LEVEL_START + ((dq) * CN23XX_DMA_OFFSET)) |
|   372: #define CN23XX_DMA_PKT_INT_LEVEL(dq) \  (argument) |
|   373: (CN23XX_DMA_INT_LEVEL_START + ((dq) * CN23XX_DMA_OFFSET)) |
|   375: #define CN23XX_DMA_TIME_INT_LEVEL(dq) \  (argument) |
|   376: (CN23XX_DMA_INT_LEVEL_START + 4 + ((dq) * CN23XX_DMA_OFFSET)) |
|   378: #define CN23XX_DMA_TIM(dq) \  (argument) |
|   379: (CN23XX_DMA_TIM_START + ((dq) * CN23XX_DMA_OFFSET)) |
|
| /OK3568_Linux_fs/kernel/fs/xfs/ |
| xfs_dquot.c |
|    70: struct xfs_dquot *dq)  (in xfs_qm_adjust_dqlimits(), argument) |
|    72: struct xfs_mount *mp = dq->q_mount;  (in xfs_qm_adjust_dqlimits()) |
|    77: ASSERT(dq->q_id);  (in xfs_qm_adjust_dqlimits()) |
|    78: defq = xfs_get_defquota(q, xfs_dquot_type(dq));  (in xfs_qm_adjust_dqlimits()) |
|    80: if (!dq->q_blk.softlimit) {  (in xfs_qm_adjust_dqlimits()) |
|    81: dq->q_blk.softlimit = defq->blk.soft;  (in xfs_qm_adjust_dqlimits()) |
|    84: if (!dq->q_blk.hardlimit) {  (in xfs_qm_adjust_dqlimits()) |
|    85: dq->q_blk.hardlimit = defq->blk.hard;  (in xfs_qm_adjust_dqlimits()) |
|    88: if (!dq->q_ino.softlimit)  (in xfs_qm_adjust_dqlimits()) |
|    89: dq->q_ino.softlimit = defq->ino.soft;  (in xfs_qm_adjust_dqlimits()) |
|   [all …] |
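
xfs_qm_adjust_dqlimits() treats a zero limit on a dquot as "not set" and fills it in from the mount's default quota limits, resource by resource. A minimal restatement of that fill-in-defaults step with illustrative types (not the XFS structures):

```c
#include <stdint.h>

struct limits {
    uint64_t soft;
    uint64_t hard;
};

/* A zero limit means "not set"; fall back to the filesystem-wide default,
 * which is what xfs_qm_adjust_dqlimits() does for blocks, inodes and
 * realtime blocks in turn. */
static void apply_default_limits(struct limits *lim, const struct limits *def)
{
    if (!lim->soft)
        lim->soft = def->soft;
    if (!lim->hard)
        lim->hard = def->hard;
}
```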
|
| /OK3568_Linux_fs/kernel/drivers/s390/crypto/ |
| zcrypt_msgtype50.c |
|   108: unsigned char dq[64];  (member) |
|   121: unsigned char dq[128];  (member) |
|   134: unsigned char dq[256];  (member) |
|   272: unsigned char *p, *q, *dp, *dq, *u, *inp;  (in ICACRT_msg_to_type50CRT_msg(), local) |
|   294: dq = crb1->dq + sizeof(crb1->dq) - short_len;  (in ICACRT_msg_to_type50CRT_msg()) |
|   308: dq = crb2->dq + sizeof(crb2->dq) - short_len;  (in ICACRT_msg_to_type50CRT_msg()) |
|   323: dq = crb3->dq + sizeof(crb3->dq) - short_len;  (in ICACRT_msg_to_type50CRT_msg()) |
|   336: copy_from_user(dq, crt->bq_key, short_len) ||  (in ICACRT_msg_to_type50CRT_msg()) |
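
Here dq is an RSA-CRT exponent field stored in one of three fixed-size message layouts (64, 128 or 256 bytes depending on key size). The pointer arithmetic `field + sizeof(field) - short_len` places the user-supplied value right-aligned in the field, so shorter big-endian integers end up left-padded. A small illustrative helper capturing that layout rule; it is not the driver's code, which copies from user space into an already-prepared message:

```c
#include <stddef.h>
#include <string.h>

/* Place a big-endian integer of value_len bytes right-aligned inside a
 * fixed-size field (value_len must not exceed field_len): zero the field,
 * then copy to field + field_len - value_len so shorter values are
 * left-padded with zeros, matching the crbN->dq pointer arithmetic above. */
static void put_right_aligned(unsigned char *field, size_t field_len,
                              const unsigned char *value, size_t value_len)
{
    memset(field, 0, field_len);
    memcpy(field + field_len - value_len, value, value_len);
}
```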
|
| /OK3568_Linux_fs/u-boot/include/fsl-mc/ |
| fsl_dpaa_fd.h |
|   101: static inline int ldpaa_dq_is_pull(const struct ldpaa_dq *dq)  (in ldpaa_dq_is_pull(), argument) |
|   103: return (int)(ldpaa_dq_flags(dq) & LDPAA_DQ_STAT_VOLATILE);  (in ldpaa_dq_is_pull()) |
|   106: const struct ldpaa_dq *dq)  (in ldpaa_dq_is_pull_complete(), argument) |
|   108: return (int)(ldpaa_dq_flags(dq) & LDPAA_DQ_STAT_EXPIRED);  (in ldpaa_dq_is_pull_complete()) |
|