Commit 86fa8af6 authored by Christoph Hellwig, committed by Alex Elder

xfs: clean up the xfs_alloc_compute_aligned calling convention

Pass an xfs_alloc_arg structure to xfs_alloc_compute_aligned and derive
the alignment and minlen parameters from it.  This cleans up the existing
callers, and we'll need even more information from the xfs_alloc_arg
in subsequent patches.  Based on a patch from Dave Chinner.
Signed-off-by: Christoph Hellwig <hch@lst.de>
Reviewed-by: Dave Chinner <dchinner@redhat.com>
Signed-off-by: Alex Elder <aelder@sgi.com>
parent 9130090b
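
At a glance, the change to the calling convention, condensed from the
xfs_alloc_ag_vextent_size hunks below (surrounding code elided):

	/* Before: alignment and minlen passed as separate arguments */
	xfs_alloc_compute_aligned(fbno, flen, args->alignment, args->minlen,
			&rbno, &rlen);

	/* After: both values derived from the xfs_alloc_arg structure */
	xfs_alloc_compute_aligned(args, fbno, flen, &rbno, &rlen);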
@@ -147,10 +147,9 @@ xfs_alloc_get_rec(
  */
 STATIC void
 xfs_alloc_compute_aligned(
+	xfs_alloc_arg_t	*args,		/* allocation argument structure */
 	xfs_agblock_t	foundbno,	/* starting block in found extent */
 	xfs_extlen_t	foundlen,	/* length in found extent */
-	xfs_extlen_t	alignment,	/* alignment for allocation */
-	xfs_extlen_t	minlen,		/* minimum length for allocation */
 	xfs_agblock_t	*resbno,	/* result block number */
 	xfs_extlen_t	*reslen)	/* result length */
 {
@@ -158,8 +157,8 @@ xfs_alloc_compute_aligned(
 	xfs_extlen_t	diff;
 	xfs_extlen_t	len;
 
-	if (alignment > 1 && foundlen >= minlen) {
-		bno = roundup(foundbno, alignment);
+	if (args->alignment > 1 && foundlen >= args->minlen) {
+		bno = roundup(foundbno, args->alignment);
 		diff = bno - foundbno;
 		len = diff >= foundlen ? 0 : foundlen - diff;
 	} else {
@@ -693,8 +692,7 @@ xfs_alloc_find_best_extent(
 		if (error)
 			goto error0;
 		XFS_WANT_CORRUPTED_GOTO(i == 1, error0);
-		xfs_alloc_compute_aligned(*sbno, *slen, args->alignment,
-					  args->minlen, &bno, slena);
+		xfs_alloc_compute_aligned(args, *sbno, *slen, &bno, slena);
 
 		/*
 		 * The good extent is closer than this one.
@@ -866,8 +864,8 @@ xfs_alloc_ag_vextent_near(
 			if ((error = xfs_alloc_get_rec(cnt_cur, &ltbno, &ltlen, &i)))
 				goto error0;
 			XFS_WANT_CORRUPTED_GOTO(i == 1, error0);
-			xfs_alloc_compute_aligned(ltbno, ltlen, args->alignment,
-					args->minlen, &ltbnoa, &ltlena);
+			xfs_alloc_compute_aligned(args, ltbno, ltlen,
+						  &ltbnoa, &ltlena);
 			if (ltlena < args->minlen)
 				continue;
 			args->len = XFS_EXTLEN_MIN(ltlena, args->maxlen);
@@ -987,8 +985,8 @@ xfs_alloc_ag_vextent_near(
 			if ((error = xfs_alloc_get_rec(bno_cur_lt, &ltbno, &ltlen, &i)))
 				goto error0;
 			XFS_WANT_CORRUPTED_GOTO(i == 1, error0);
-			xfs_alloc_compute_aligned(ltbno, ltlen, args->alignment,
-					args->minlen, &ltbnoa, &ltlena);
+			xfs_alloc_compute_aligned(args, ltbno, ltlen,
+						  &ltbnoa, &ltlena);
 			if (ltlena >= args->minlen)
 				break;
 			if ((error = xfs_btree_decrement(bno_cur_lt, 0, &i)))
@@ -1003,8 +1001,8 @@ xfs_alloc_ag_vextent_near(
 			if ((error = xfs_alloc_get_rec(bno_cur_gt, &gtbno, &gtlen, &i)))
 				goto error0;
 			XFS_WANT_CORRUPTED_GOTO(i == 1, error0);
-			xfs_alloc_compute_aligned(gtbno, gtlen, args->alignment,
-					args->minlen, &gtbnoa, &gtlena);
+			xfs_alloc_compute_aligned(args, gtbno, gtlen,
+						  &gtbnoa, &gtlena);
 			if (gtlena >= args->minlen)
 				break;
 			if ((error = xfs_btree_increment(bno_cur_gt, 0, &i)))
@@ -1183,8 +1181,7 @@ xfs_alloc_ag_vextent_size(
 	 * once aligned; if not, we search left for something better.
 	 * This can't happen in the second case above.
 	 */
-	xfs_alloc_compute_aligned(fbno, flen, args->alignment, args->minlen,
-			&rbno, &rlen);
+	xfs_alloc_compute_aligned(args, fbno, flen, &rbno, &rlen);
 	rlen = XFS_EXTLEN_MIN(args->maxlen, rlen);
 	XFS_WANT_CORRUPTED_GOTO(rlen == 0 ||
 			(rlen <= flen && rbno + rlen <= fbno + flen), error0);
@@ -1209,8 +1206,8 @@ xfs_alloc_ag_vextent_size(
 			XFS_WANT_CORRUPTED_GOTO(i == 1, error0);
 			if (flen < bestrlen)
 				break;
-			xfs_alloc_compute_aligned(fbno, flen, args->alignment,
-					args->minlen, &rbno, &rlen);
+			xfs_alloc_compute_aligned(args, fbno, flen,
+						  &rbno, &rlen);
 			rlen = XFS_EXTLEN_MIN(args->maxlen, rlen);
 			XFS_WANT_CORRUPTED_GOTO(rlen == 0 ||
 				(rlen <= flen && rbno + rlen <= fbno + flen),
...
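
To make the aligned-extent computation concrete, below is a standalone,
hypothetical reduction of xfs_alloc_compute_aligned built only from the
first two hunks above.  The typedefs, the roundup() stand-in, the else
branch, and the final out-parameter stores are assumptions made so the
sketch compiles and runs; they are not claimed to match the kernel
source.

	/*
	 * Standalone sketch of the alignment computation, reconstructed
	 * from the first two hunks above.  Everything not visible in the
	 * diff is an assumption for the sake of a runnable example.
	 */
	#include <stdio.h>

	typedef unsigned int	xfs_agblock_t;	/* assumed width */
	typedef unsigned int	xfs_extlen_t;	/* assumed width */

	/* Minimal stand-in for the kernel's roundup() macro. */
	#define roundup(x, y)	((((x) + ((y) - 1)) / (y)) * (y))

	static void
	compute_aligned(
		xfs_agblock_t	foundbno,	/* starting block in found extent */
		xfs_extlen_t	foundlen,	/* length in found extent */
		xfs_extlen_t	alignment,	/* alignment for allocation */
		xfs_extlen_t	minlen,		/* minimum length for allocation */
		xfs_agblock_t	*resbno,	/* result block number */
		xfs_extlen_t	*reslen)	/* result length */
	{
		xfs_agblock_t	bno;
		xfs_extlen_t	diff;
		xfs_extlen_t	len;

		if (alignment > 1 && foundlen >= minlen) {
			/* Round the start up to the alignment and shrink
			 * the usable length by the blocks skipped over. */
			bno = roundup(foundbno, alignment);
			diff = bno - foundbno;
			len = diff >= foundlen ? 0 : foundlen - diff;
		} else {
			/* Assumed fallback: extent is usable as found. */
			bno = foundbno;
			len = foundlen;
		}
		*resbno = bno;
		*reslen = len;
	}

	int main(void)
	{
		xfs_agblock_t	bno;
		xfs_extlen_t	len;

		/* A 10-block extent at block 13, alignment 8, minlen 4:
		 * the usable region starts at block 16, 7 blocks long. */
		compute_aligned(13, 10, 8, 4, &bno, &len);
		printf("bno=%u len=%u\n", bno, len);	/* bno=16 len=7 */
		return 0;
	}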