Skip to content

Commit e37afcb

Browse files
shirazsaleem authored and rleon committed
RDMA/irdma: Harden depth calculation functions
An issue was exposed where the OS can pass in U32_MAX for the SQ/RQ/SRQ size. This can cause integer overflow and truncation of the SQ/RQ/SRQ depth, returning success when the call should have failed. Harden the functions to do all depth calculations and boundary checking in u64 sizes.

Fixes: 563e1fe ("RDMA/irdma: Add SRQ support")
Signed-off-by: Shiraz Saleem <[email protected]>
Signed-off-by: Tatyana Nikolova <[email protected]>
Signed-off-by: Leon Romanovsky <[email protected]>
1 parent 7221f58 commit e37afcb

1 file changed

Lines changed: 22 additions & 17 deletions

File tree

  • drivers/infiniband/hw/irdma

drivers/infiniband/hw/irdma/uk.c

Lines changed: 22 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -1438,7 +1438,7 @@ int irdma_uk_cq_poll_cmpl(struct irdma_cq_uk *cq,
14381438
* irdma_round_up_wq - return round up qp wq depth
14391439
* @wqdepth: wq depth in quanta to round up
14401440
*/
1441-
static int irdma_round_up_wq(u32 wqdepth)
1441+
static u64 irdma_round_up_wq(u64 wqdepth)
14421442
{
14431443
int scount = 1;
14441444

@@ -1491,15 +1491,16 @@ void irdma_get_wqe_shift(struct irdma_uk_attrs *uk_attrs, u32 sge,
14911491
int irdma_get_sqdepth(struct irdma_uk_attrs *uk_attrs, u32 sq_size, u8 shift,
14921492
u32 *sqdepth)
14931493
{
1494-
u32 min_size = (u32)uk_attrs->min_hw_wq_size << shift;
1494+
u32 min_hw_quanta = (u32)uk_attrs->min_hw_wq_size << shift;
1495+
u64 hw_quanta =
1496+
irdma_round_up_wq(((u64)sq_size << shift) + IRDMA_SQ_RSVD);
14951497

1496-
*sqdepth = irdma_round_up_wq((sq_size << shift) + IRDMA_SQ_RSVD);
1497-
1498-
if (*sqdepth < min_size)
1499-
*sqdepth = min_size;
1500-
else if (*sqdepth > uk_attrs->max_hw_wq_quanta)
1498+
if (hw_quanta < min_hw_quanta)
1499+
hw_quanta = min_hw_quanta;
1500+
else if (hw_quanta > uk_attrs->max_hw_wq_quanta)
15011501
return -EINVAL;
15021502

1503+
*sqdepth = hw_quanta;
15031504
return 0;
15041505
}
15051506

@@ -1513,15 +1514,16 @@ int irdma_get_sqdepth(struct irdma_uk_attrs *uk_attrs, u32 sq_size, u8 shift,
15131514
int irdma_get_rqdepth(struct irdma_uk_attrs *uk_attrs, u32 rq_size, u8 shift,
15141515
u32 *rqdepth)
15151516
{
1516-
u32 min_size = (u32)uk_attrs->min_hw_wq_size << shift;
1517-
1518-
*rqdepth = irdma_round_up_wq((rq_size << shift) + IRDMA_RQ_RSVD);
1517+
u32 min_hw_quanta = (u32)uk_attrs->min_hw_wq_size << shift;
1518+
u64 hw_quanta =
1519+
irdma_round_up_wq(((u64)rq_size << shift) + IRDMA_RQ_RSVD);
15191520

1520-
if (*rqdepth < min_size)
1521-
*rqdepth = min_size;
1522-
else if (*rqdepth > uk_attrs->max_hw_rq_quanta)
1521+
if (hw_quanta < min_hw_quanta)
1522+
hw_quanta = min_hw_quanta;
1523+
else if (hw_quanta > uk_attrs->max_hw_rq_quanta)
15231524
return -EINVAL;
15241525

1526+
*rqdepth = hw_quanta;
15251527
return 0;
15261528
}
15271529

@@ -1535,13 +1537,16 @@ int irdma_get_rqdepth(struct irdma_uk_attrs *uk_attrs, u32 rq_size, u8 shift,
15351537
int irdma_get_srqdepth(struct irdma_uk_attrs *uk_attrs, u32 srq_size, u8 shift,
15361538
u32 *srqdepth)
15371539
{
1538-
*srqdepth = irdma_round_up_wq((srq_size << shift) + IRDMA_RQ_RSVD);
1540+
u32 min_hw_quanta = (u32)uk_attrs->min_hw_wq_size << shift;
1541+
u64 hw_quanta =
1542+
irdma_round_up_wq(((u64)srq_size << shift) + IRDMA_RQ_RSVD);
15391543

1540-
if (*srqdepth < ((u32)uk_attrs->min_hw_wq_size << shift))
1541-
*srqdepth = uk_attrs->min_hw_wq_size << shift;
1542-
else if (*srqdepth > uk_attrs->max_hw_srq_quanta)
1544+
if (hw_quanta < min_hw_quanta)
1545+
hw_quanta = min_hw_quanta;
1546+
else if (hw_quanta > uk_attrs->max_hw_srq_quanta)
15431547
return -EINVAL;
15441548

1549+
*srqdepth = hw_quanta;
15451550
return 0;
15461551
}
15471552

0 commit comments

Comments (0)