Commit 84ca4e54 authored by Pascal van Leeuwen, committed by Herbert Xu

crypto: inside-secure - Add support for 256 bit wide internal bus

This patch adds support for large EIP197's with a 256 bit wide internal
bus, which affects the format of the result descriptor due to internal
alignment requirements.
Signed-off-by: Pascal van Leeuwen <pvanleeuwen@verimatrix.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
parent a9a89624
...@@ -492,12 +492,12 @@ static int safexcel_hw_setup_cdesc_rings(struct safexcel_crypto_priv *priv) ...@@ -492,12 +492,12 @@ static int safexcel_hw_setup_cdesc_rings(struct safexcel_crypto_priv *priv)
writel(upper_32_bits(priv->ring[i].cdr.base_dma), writel(upper_32_bits(priv->ring[i].cdr.base_dma),
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_RING_BASE_ADDR_HI); EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_RING_BASE_ADDR_HI);
writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.cd_offset << 16) | writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.cd_offset << 14) |
priv->config.cd_size, priv->config.cd_size,
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_DESC_SIZE); EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_DESC_SIZE);
writel(((cd_fetch_cnt * writel(((cd_fetch_cnt *
(cd_size_rnd << priv->hwconfig.hwdataw)) << 16) | (cd_size_rnd << priv->hwconfig.hwdataw)) << 16) |
(cd_fetch_cnt * priv->config.cd_offset), (cd_fetch_cnt * (priv->config.cd_offset / sizeof(u32))),
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_CFG); EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_CFG);
/* Configure DMA tx control */ /* Configure DMA tx control */
...@@ -540,13 +540,13 @@ static int safexcel_hw_setup_rdesc_rings(struct safexcel_crypto_priv *priv) ...@@ -540,13 +540,13 @@ static int safexcel_hw_setup_rdesc_rings(struct safexcel_crypto_priv *priv)
writel(upper_32_bits(priv->ring[i].rdr.base_dma), writel(upper_32_bits(priv->ring[i].rdr.base_dma),
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_RING_BASE_ADDR_HI); EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_RING_BASE_ADDR_HI);
writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.rd_offset << 16) | writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.rd_offset << 14) |
priv->config.rd_size, priv->config.rd_size,
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_DESC_SIZE); EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_DESC_SIZE);
writel(((rd_fetch_cnt * writel(((rd_fetch_cnt *
(rd_size_rnd << priv->hwconfig.hwdataw)) << 16) | (rd_size_rnd << priv->hwconfig.hwdataw)) << 16) |
(rd_fetch_cnt * priv->config.rd_offset), (rd_fetch_cnt * (priv->config.rd_offset / sizeof(u32))),
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_CFG); EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_CFG);
/* Configure DMA tx control */ /* Configure DMA tx control */
...@@ -572,7 +572,7 @@ static int safexcel_hw_setup_rdesc_rings(struct safexcel_crypto_priv *priv) ...@@ -572,7 +572,7 @@ static int safexcel_hw_setup_rdesc_rings(struct safexcel_crypto_priv *priv)
static int safexcel_hw_init(struct safexcel_crypto_priv *priv) static int safexcel_hw_init(struct safexcel_crypto_priv *priv)
{ {
u32 val; u32 val;
int i, ret, pe; int i, ret, pe, opbuflo, opbufhi;
dev_dbg(priv->dev, "HW init: using %d pipe(s) and %d ring(s)\n", dev_dbg(priv->dev, "HW init: using %d pipe(s) and %d ring(s)\n",
priv->config.pes, priv->config.rings); priv->config.pes, priv->config.rings);
...@@ -652,9 +652,16 @@ static int safexcel_hw_init(struct safexcel_crypto_priv *priv) ...@@ -652,9 +652,16 @@ static int safexcel_hw_init(struct safexcel_crypto_priv *priv)
; ;
/* DMA transfer size to use */ /* DMA transfer size to use */
if (priv->hwconfig.hwnumpes > 4) {
opbuflo = 9;
opbufhi = 10;
} else {
opbuflo = 7;
opbufhi = 8;
}
val = EIP197_HIA_DSE_CFG_DIS_DEBUG; val = EIP197_HIA_DSE_CFG_DIS_DEBUG;
val |= EIP197_HIA_DxE_CFG_MIN_DATA_SIZE(7) | val |= EIP197_HIA_DxE_CFG_MIN_DATA_SIZE(opbuflo) |
EIP197_HIA_DxE_CFG_MAX_DATA_SIZE(8); EIP197_HIA_DxE_CFG_MAX_DATA_SIZE(opbufhi);
val |= EIP197_HIA_DxE_CFG_DATA_CACHE_CTRL(WR_CACHE_3BITS); val |= EIP197_HIA_DxE_CFG_DATA_CACHE_CTRL(WR_CACHE_3BITS);
val |= EIP197_HIA_DSE_CFG_ALWAYS_BUFFERABLE; val |= EIP197_HIA_DSE_CFG_ALWAYS_BUFFERABLE;
/* FIXME: instability issues can occur for EIP97 but disabling /* FIXME: instability issues can occur for EIP97 but disabling
...@@ -668,8 +675,8 @@ static int safexcel_hw_init(struct safexcel_crypto_priv *priv) ...@@ -668,8 +675,8 @@ static int safexcel_hw_init(struct safexcel_crypto_priv *priv)
writel(0, EIP197_HIA_DSE_THR(priv) + EIP197_HIA_DSE_THR_CTRL(pe)); writel(0, EIP197_HIA_DSE_THR(priv) + EIP197_HIA_DSE_THR_CTRL(pe));
/* Configure the procesing engine thresholds */ /* Configure the procesing engine thresholds */
writel(EIP197_PE_OUT_DBUF_THRES_MIN(7) | writel(EIP197_PE_OUT_DBUF_THRES_MIN(opbuflo) |
EIP197_PE_OUT_DBUF_THRES_MAX(8), EIP197_PE_OUT_DBUF_THRES_MAX(opbufhi),
EIP197_PE(priv) + EIP197_PE_OUT_DBUF_THRES(pe)); EIP197_PE(priv) + EIP197_PE_OUT_DBUF_THRES(pe));
/* Processing Engine configuration */ /* Processing Engine configuration */
...@@ -709,7 +716,7 @@ static int safexcel_hw_init(struct safexcel_crypto_priv *priv) ...@@ -709,7 +716,7 @@ static int safexcel_hw_init(struct safexcel_crypto_priv *priv)
writel(0, writel(0,
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_PROC_PNTR); EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_PROC_PNTR);
writel((EIP197_DEFAULT_RING_SIZE * priv->config.cd_offset) << 2, writel((EIP197_DEFAULT_RING_SIZE * priv->config.cd_offset),
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_RING_SIZE); EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_RING_SIZE);
} }
...@@ -732,7 +739,7 @@ static int safexcel_hw_init(struct safexcel_crypto_priv *priv) ...@@ -732,7 +739,7 @@ static int safexcel_hw_init(struct safexcel_crypto_priv *priv)
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_PROC_PNTR); EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_PROC_PNTR);
/* Ring size */ /* Ring size */
writel((EIP197_DEFAULT_RING_SIZE * priv->config.rd_offset) << 2, writel((EIP197_DEFAULT_RING_SIZE * priv->config.rd_offset),
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_RING_SIZE); EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_RING_SIZE);
} }
...@@ -852,20 +859,24 @@ void safexcel_dequeue(struct safexcel_crypto_priv *priv, int ring) ...@@ -852,20 +859,24 @@ void safexcel_dequeue(struct safexcel_crypto_priv *priv, int ring)
spin_unlock_bh(&priv->ring[ring].lock); spin_unlock_bh(&priv->ring[ring].lock);
/* let the RDR know we have pending descriptors */ /* let the RDR know we have pending descriptors */
writel((rdesc * priv->config.rd_offset) << 2, writel((rdesc * priv->config.rd_offset),
EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PREP_COUNT); EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PREP_COUNT);
/* let the CDR know we have pending descriptors */ /* let the CDR know we have pending descriptors */
writel((cdesc * priv->config.cd_offset) << 2, writel((cdesc * priv->config.cd_offset),
EIP197_HIA_CDR(priv, ring) + EIP197_HIA_xDR_PREP_COUNT); EIP197_HIA_CDR(priv, ring) + EIP197_HIA_xDR_PREP_COUNT);
} }
inline int safexcel_rdesc_check_errors(struct safexcel_crypto_priv *priv, inline int safexcel_rdesc_check_errors(struct safexcel_crypto_priv *priv,
struct safexcel_result_desc *rdesc) void *rdp)
{ {
if (likely((!rdesc->descriptor_overflow) && struct safexcel_result_desc *rdesc = rdp;
(!rdesc->buffer_overflow) && struct result_data_desc *result_data = rdp + priv->config.res_offset;
(!rdesc->result_data.error_code)))
if (likely((!rdesc->last_seg) || /* Rest only valid if last seg! */
((!rdesc->descriptor_overflow) &&
(!rdesc->buffer_overflow) &&
(!result_data->error_code))))
return 0; return 0;
if (rdesc->descriptor_overflow) if (rdesc->descriptor_overflow)
...@@ -874,13 +885,14 @@ inline int safexcel_rdesc_check_errors(struct safexcel_crypto_priv *priv, ...@@ -874,13 +885,14 @@ inline int safexcel_rdesc_check_errors(struct safexcel_crypto_priv *priv,
if (rdesc->buffer_overflow) if (rdesc->buffer_overflow)
dev_err(priv->dev, "Buffer overflow detected"); dev_err(priv->dev, "Buffer overflow detected");
if (rdesc->result_data.error_code & 0x4066) { if (result_data->error_code & 0x4066) {
/* Fatal error (bits 1,2,5,6 & 14) */ /* Fatal error (bits 1,2,5,6 & 14) */
dev_err(priv->dev, dev_err(priv->dev,
"result descriptor error (%x)", "result descriptor error (%x)",
rdesc->result_data.error_code); result_data->error_code);
return -EIO; return -EIO;
} else if (rdesc->result_data.error_code & } else if (result_data->error_code &
(BIT(7) | BIT(4) | BIT(3) | BIT(0))) { (BIT(7) | BIT(4) | BIT(3) | BIT(0))) {
/* /*
* Give priority over authentication fails: * Give priority over authentication fails:
...@@ -888,7 +900,7 @@ inline int safexcel_rdesc_check_errors(struct safexcel_crypto_priv *priv, ...@@ -888,7 +900,7 @@ inline int safexcel_rdesc_check_errors(struct safexcel_crypto_priv *priv,
* something wrong with the input! * something wrong with the input!
*/ */
return -EINVAL; return -EINVAL;
} else if (rdesc->result_data.error_code & BIT(9)) { } else if (result_data->error_code & BIT(9)) {
/* Authentication failed */ /* Authentication failed */
return -EBADMSG; return -EBADMSG;
} }
...@@ -1019,7 +1031,7 @@ static inline void safexcel_handle_result_descriptor(struct safexcel_crypto_priv ...@@ -1019,7 +1031,7 @@ static inline void safexcel_handle_result_descriptor(struct safexcel_crypto_priv
acknowledge: acknowledge:
if (i) if (i)
writel(EIP197_xDR_PROC_xD_PKT(i) | writel(EIP197_xDR_PROC_xD_PKT(i) |
EIP197_xDR_PROC_xD_COUNT(tot_descs * priv->config.rd_offset), (tot_descs * priv->config.rd_offset),
EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PROC_COUNT); EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PROC_COUNT);
/* If the number of requests overflowed the counter, try to proceed more /* If the number of requests overflowed the counter, try to proceed more
...@@ -1292,30 +1304,25 @@ static void safexcel_unregister_algorithms(struct safexcel_crypto_priv *priv) ...@@ -1292,30 +1304,25 @@ static void safexcel_unregister_algorithms(struct safexcel_crypto_priv *priv)
static void safexcel_configure(struct safexcel_crypto_priv *priv) static void safexcel_configure(struct safexcel_crypto_priv *priv)
{ {
u32 val, mask = 0; u32 mask = BIT(priv->hwconfig.hwdataw) - 1;
val = readl(EIP197_HIA_AIC_G(priv) + EIP197_HIA_OPTIONS);
/* Read number of PEs from the engine */ priv->config.pes = priv->hwconfig.hwnumpes;
if (priv->flags & SAFEXCEL_HW_EIP197) priv->config.rings = min_t(u32, priv->hwconfig.hwnumrings, max_rings);
/* Wider field width for all EIP197 type engines */
mask = EIP197_N_PES_MASK;
else
/* Narrow field width for EIP97 type engine */
mask = EIP97_N_PES_MASK;
priv->config.pes = (val >> EIP197_N_PES_OFFSET) & mask;
priv->config.rings = min_t(u32, val & GENMASK(3, 0), max_rings);
val = (val & GENMASK(27, 25)) >> 25; priv->config.cd_size = EIP197_CD64_FETCH_SIZE;
mask = BIT(val) - 1;
priv->config.cd_size = (sizeof(struct safexcel_command_desc) / sizeof(u32));
priv->config.cd_offset = (priv->config.cd_size + mask) & ~mask; priv->config.cd_offset = (priv->config.cd_size + mask) & ~mask;
priv->config.rd_size = (sizeof(struct safexcel_result_desc) / sizeof(u32)); /* res token is behind the descr, but ofs must be rounded to buswdth */
priv->config.res_offset = (EIP197_RD64_FETCH_SIZE + mask) & ~mask;
/* now the size of the descr is this 1st part plus the result struct */
priv->config.rd_size = priv->config.res_offset +
EIP197_RD64_RESULT_SIZE;
priv->config.rd_offset = (priv->config.rd_size + mask) & ~mask; priv->config.rd_offset = (priv->config.rd_size + mask) & ~mask;
/* convert dwords to bytes */
priv->config.cd_offset *= sizeof(u32);
priv->config.rd_offset *= sizeof(u32);
priv->config.res_offset *= sizeof(u32);
} }
static void safexcel_init_register_offsets(struct safexcel_crypto_priv *priv) static void safexcel_init_register_offsets(struct safexcel_crypto_priv *priv)
...@@ -1457,6 +1464,10 @@ static int safexcel_probe_generic(void *pdev, ...@@ -1457,6 +1464,10 @@ static int safexcel_probe_generic(void *pdev,
priv->hwconfig.hwrfsize = ((hiaopt >> EIP197_RFSIZE_OFFSET) & priv->hwconfig.hwrfsize = ((hiaopt >> EIP197_RFSIZE_OFFSET) &
EIP197_RFSIZE_MASK) + EIP197_RFSIZE_MASK) +
EIP197_RFSIZE_ADJUST; EIP197_RFSIZE_ADJUST;
priv->hwconfig.hwnumpes = (hiaopt >> EIP197_N_PES_OFFSET) &
EIP197_N_PES_MASK;
priv->hwconfig.hwnumrings = (hiaopt >> EIP197_N_RINGS_OFFSET) &
EIP197_N_RINGS_MASK;
} else { } else {
/* EIP97 */ /* EIP97 */
priv->hwconfig.hwdataw = (hiaopt >> EIP197_HWDATAW_OFFSET) & priv->hwconfig.hwdataw = (hiaopt >> EIP197_HWDATAW_OFFSET) &
...@@ -1465,6 +1476,9 @@ static int safexcel_probe_generic(void *pdev, ...@@ -1465,6 +1476,9 @@ static int safexcel_probe_generic(void *pdev,
EIP97_CFSIZE_MASK; EIP97_CFSIZE_MASK;
priv->hwconfig.hwrfsize = (hiaopt >> EIP97_RFSIZE_OFFSET) & priv->hwconfig.hwrfsize = (hiaopt >> EIP97_RFSIZE_OFFSET) &
EIP97_RFSIZE_MASK; EIP97_RFSIZE_MASK;
priv->hwconfig.hwnumpes = 1; /* by definition */
priv->hwconfig.hwnumrings = (hiaopt >> EIP197_N_RINGS_OFFSET) &
EIP197_N_RINGS_MASK;
} }
/* Get supported algorithms from EIP96 transform engine */ /* Get supported algorithms from EIP96 transform engine */
...@@ -1472,8 +1486,9 @@ static int safexcel_probe_generic(void *pdev, ...@@ -1472,8 +1486,9 @@ static int safexcel_probe_generic(void *pdev,
EIP197_PE_EIP96_OPTIONS(0)); EIP197_PE_EIP96_OPTIONS(0));
/* Print single info line describing what we just detected */ /* Print single info line describing what we just detected */
dev_info(priv->dev, "EIP%d:%x(%d)-HIA:%x(%d,%d,%d),PE:%x,alg:%08x\n", dev_info(priv->dev, "EIP%d:%x(%d,%d,%d,%d)-HIA:%x(%d,%d,%d),PE:%x,alg:%08x\n",
peid, priv->hwconfig.hwver, hwctg, priv->hwconfig.hiaver, peid, priv->hwconfig.hwver, hwctg, priv->hwconfig.hwnumpes,
priv->hwconfig.hwnumrings, priv->hwconfig.hiaver,
priv->hwconfig.hwdataw, priv->hwconfig.hwcfsize, priv->hwconfig.hwdataw, priv->hwconfig.hwcfsize,
priv->hwconfig.hwrfsize, priv->hwconfig.pever, priv->hwconfig.hwrfsize, priv->hwconfig.pever,
priv->hwconfig.algo_flags); priv->hwconfig.algo_flags);
......
...@@ -213,7 +213,6 @@ ...@@ -213,7 +213,6 @@
/* EIP197_HIA_xDR_PROC_COUNT */ /* EIP197_HIA_xDR_PROC_COUNT */
#define EIP197_xDR_PROC_xD_PKT_OFFSET 24 #define EIP197_xDR_PROC_xD_PKT_OFFSET 24
#define EIP197_xDR_PROC_xD_PKT_MASK GENMASK(6, 0) #define EIP197_xDR_PROC_xD_PKT_MASK GENMASK(6, 0)
#define EIP197_xDR_PROC_xD_COUNT(n) ((n) << 2)
#define EIP197_xDR_PROC_xD_PKT(n) ((n) << 24) #define EIP197_xDR_PROC_xD_PKT(n) ((n) << 24)
#define EIP197_xDR_PROC_CLR_COUNT BIT(31) #define EIP197_xDR_PROC_CLR_COUNT BIT(31)
...@@ -228,6 +227,8 @@ ...@@ -228,6 +227,8 @@
#define EIP197_HIA_RA_PE_CTRL_EN BIT(30) #define EIP197_HIA_RA_PE_CTRL_EN BIT(30)
/* EIP197_HIA_OPTIONS */ /* EIP197_HIA_OPTIONS */
#define EIP197_N_RINGS_OFFSET 0
#define EIP197_N_RINGS_MASK GENMASK(3, 0)
#define EIP197_N_PES_OFFSET 4 #define EIP197_N_PES_OFFSET 4
#define EIP197_N_PES_MASK GENMASK(4, 0) #define EIP197_N_PES_MASK GENMASK(4, 0)
#define EIP97_N_PES_MASK GENMASK(2, 0) #define EIP97_N_PES_MASK GENMASK(2, 0)
...@@ -486,16 +487,15 @@ struct safexcel_result_desc { ...@@ -486,16 +487,15 @@ struct safexcel_result_desc {
u32 data_lo; u32 data_lo;
u32 data_hi; u32 data_hi;
struct result_data_desc result_data;
} __packed; } __packed;
/* /*
* The EIP(1)97 only needs to fetch the descriptor part of * The EIP(1)97 only needs to fetch the descriptor part of
* the result descriptor, not the result token part! * the result descriptor, not the result token part!
*/ */
#define EIP197_RD64_FETCH_SIZE ((sizeof(struct safexcel_result_desc) -\ #define EIP197_RD64_FETCH_SIZE (sizeof(struct safexcel_result_desc) /\
sizeof(struct result_data_desc)) /\ sizeof(u32))
#define EIP197_RD64_RESULT_SIZE (sizeof(struct result_data_desc) /\
sizeof(u32)) sizeof(u32))
struct safexcel_token { struct safexcel_token {
...@@ -582,6 +582,9 @@ struct safexcel_command_desc { ...@@ -582,6 +582,9 @@ struct safexcel_command_desc {
struct safexcel_control_data_desc control_data; struct safexcel_control_data_desc control_data;
} __packed; } __packed;
#define EIP197_CD64_FETCH_SIZE (sizeof(struct safexcel_command_desc) /\
sizeof(u32))
/* /*
* Internal structures & functions * Internal structures & functions
*/ */
...@@ -625,6 +628,7 @@ struct safexcel_config { ...@@ -625,6 +628,7 @@ struct safexcel_config {
u32 rd_size; u32 rd_size;
u32 rd_offset; u32 rd_offset;
u32 res_offset;
}; };
struct safexcel_work_data { struct safexcel_work_data {
...@@ -734,6 +738,8 @@ struct safexcel_hwconfig { ...@@ -734,6 +738,8 @@ struct safexcel_hwconfig {
int hwdataw; int hwdataw;
int hwcfsize; int hwcfsize;
int hwrfsize; int hwrfsize;
int hwnumpes;
int hwnumrings;
}; };
struct safexcel_crypto_priv { struct safexcel_crypto_priv {
...@@ -805,7 +811,7 @@ struct safexcel_inv_result { ...@@ -805,7 +811,7 @@ struct safexcel_inv_result {
void safexcel_dequeue(struct safexcel_crypto_priv *priv, int ring); void safexcel_dequeue(struct safexcel_crypto_priv *priv, int ring);
int safexcel_rdesc_check_errors(struct safexcel_crypto_priv *priv, int safexcel_rdesc_check_errors(struct safexcel_crypto_priv *priv,
struct safexcel_result_desc *rdesc); void *rdp);
void safexcel_complete(struct safexcel_crypto_priv *priv, int ring); void safexcel_complete(struct safexcel_crypto_priv *priv, int ring);
int safexcel_invalidate_cache(struct crypto_async_request *async, int safexcel_invalidate_cache(struct crypto_async_request *async,
struct safexcel_crypto_priv *priv, struct safexcel_crypto_priv *priv,
......
...@@ -14,7 +14,7 @@ int safexcel_init_ring_descriptors(struct safexcel_crypto_priv *priv, ...@@ -14,7 +14,7 @@ int safexcel_init_ring_descriptors(struct safexcel_crypto_priv *priv,
struct safexcel_desc_ring *cdr, struct safexcel_desc_ring *cdr,
struct safexcel_desc_ring *rdr) struct safexcel_desc_ring *rdr)
{ {
cdr->offset = sizeof(u32) * priv->config.cd_offset; cdr->offset = priv->config.cd_offset;
cdr->base = dmam_alloc_coherent(priv->dev, cdr->base = dmam_alloc_coherent(priv->dev,
cdr->offset * EIP197_DEFAULT_RING_SIZE, cdr->offset * EIP197_DEFAULT_RING_SIZE,
&cdr->base_dma, GFP_KERNEL); &cdr->base_dma, GFP_KERNEL);
...@@ -24,7 +24,7 @@ int safexcel_init_ring_descriptors(struct safexcel_crypto_priv *priv, ...@@ -24,7 +24,7 @@ int safexcel_init_ring_descriptors(struct safexcel_crypto_priv *priv,
cdr->base_end = cdr->base + cdr->offset * (EIP197_DEFAULT_RING_SIZE - 1); cdr->base_end = cdr->base + cdr->offset * (EIP197_DEFAULT_RING_SIZE - 1);
cdr->read = cdr->base; cdr->read = cdr->base;
rdr->offset = sizeof(u32) * priv->config.rd_offset; rdr->offset = priv->config.rd_offset;
rdr->base = dmam_alloc_coherent(priv->dev, rdr->base = dmam_alloc_coherent(priv->dev,
rdr->offset * EIP197_DEFAULT_RING_SIZE, rdr->offset * EIP197_DEFAULT_RING_SIZE,
&rdr->base_dma, GFP_KERNEL); &rdr->base_dma, GFP_KERNEL);
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment