@@ -309,7 +309,8 @@ xfs_alloc_fixup_trees(
xfs_extlen_t flen, /* length of free extent */
xfs_agblock_t rbno, /* starting block of returned extent */
xfs_extlen_t rlen, /* length of returned extent */
- int flags) /* flags, XFSA_FIXUP_... */
+ int flags, /* flags, XFSA_FIXUP_... */
+ int isfl) /* nonzero: debug-trace this fixup (freelist flag from caller) */
{
int error; /* error code */
int i; /* operation results */
@@ -376,15 +377,27 @@ xfs_alloc_fixup_trees(
nfbno1 = rbno + rlen;
nflen1 = flen - rlen;
nfbno2 = NULLAGBLOCK;
+ if (isfl) /* debug: returned extent at start of free extent */
+ xfs_warn(mp,
+ "Case 1 Inserting: nfbno1: %u, nflen1: %u, nfbno2: %u, nflen2: %u, fbno: %u, flen: %u, rbno: %u, rlen: %u",
+ nfbno1, nflen1, nfbno2, nflen2, fbno, flen, rbno, rlen);
} else if (rbno + rlen == fbno + flen) {
nfbno1 = fbno;
nflen1 = flen - rlen;
nfbno2 = NULLAGBLOCK;
+ if (isfl) /* debug: returned extent at end of free extent */
+ xfs_warn(mp,
+ "Case 2 Inserting: nfbno1: %u, nflen1: %u, nfbno2: %u, nflen2: %u, fbno: %u, flen: %u, rbno: %u, rlen: %u",
+ nfbno1, nflen1, nfbno2, nflen2, fbno, flen, rbno, rlen);
} else {
nfbno1 = fbno;
nflen1 = rbno - fbno;
nfbno2 = rbno + rlen;
nflen2 = (fbno + flen) - nfbno2;
+ if (isfl) /* debug: returned extent splits free extent in two */
+ xfs_warn(mp,
+ "Case 3 Inserting: nfbno1: %u, nflen1: %u, nfbno2: %u, nflen2: %u, fbno: %u, flen: %u, rbno: %u, rlen: %u",
+ nfbno1, nflen1, nfbno2, nflen2, fbno, flen, rbno, rlen);
}
/*
* Delete the entry from the by-size btree.
@@ -396,19 +409,31 @@ xfs_alloc_fixup_trees(
* Add new by-size btree entry(s).
*/
if (nfbno1 != NULLAGBLOCK) {
+ struct xfs_btree_block *cntblock; /* by-count block under cursor, for debug trace */
if ((error = xfs_alloc_lookup_eq(cnt_cur, nfbno1, nflen1, &i)))
return error;
+ cntblock = XFS_BUF_TO_BLOCK(cnt_cur->bc_bufs[0]);
XFS_WANT_CORRUPTED_RETURN(mp, i == 0);
+ xfs_warn(mp,
+ "B+Tree before insert: isfl: %d, bb_numrec: %d, addr: %llu", isfl, xfs_btree_get_numrecs(cntblock), (unsigned long long)XFS_BUF_ADDR(cnt_cur->bc_bufs[0]));
if ((error = xfs_btree_insert(cnt_cur, &i)))
return error;
+ xfs_warn(mp,
+ "B+Tree after insert: isfl: %d, bb_numrec: %d, addr: %llu", isfl, xfs_btree_get_numrecs(XFS_BUF_TO_BLOCK(cnt_cur->bc_bufs[0])), (unsigned long long)XFS_BUF_ADDR(cnt_cur->bc_bufs[0]));
XFS_WANT_CORRUPTED_RETURN(mp, i == 1);
}
if (nfbno2 != NULLAGBLOCK) {
+ struct xfs_btree_block *cntblock; /* by-count block under cursor, for debug trace */
if ((error = xfs_alloc_lookup_eq(cnt_cur, nfbno2, nflen2, &i)))
return error;
+ cntblock = XFS_BUF_TO_BLOCK(cnt_cur->bc_bufs[0]);
XFS_WANT_CORRUPTED_RETURN(mp, i == 0);
+ xfs_warn(mp,
+ "B+Tree before insert: isfl: %d, bb_numrec: %d, addr: %llu", isfl, xfs_btree_get_numrecs(cntblock), (unsigned long long)XFS_BUF_ADDR(cnt_cur->bc_bufs[0]));
if ((error = xfs_btree_insert(cnt_cur, &i)))
return error;
+ xfs_warn(mp,
+ "B+Tree after insert: isfl: %d, bb_numrec: %d, addr: %llu", isfl, xfs_btree_get_numrecs(XFS_BUF_TO_BLOCK(cnt_cur->bc_bufs[0])), (unsigned long long)XFS_BUF_ADDR(cnt_cur->bc_bufs[0]));
XFS_WANT_CORRUPTED_RETURN(mp, i == 1);
}
/*
@@ -730,7 +755,7 @@ xfs_alloc_ag_vextent_exact(
ASSERT(args->agbno + args->len <=
be32_to_cpu(XFS_BUF_TO_AGF(args->agbp)->agf_length));
error = xfs_alloc_fixup_trees(cnt_cur, bno_cur, fbno, flen, args->agbno,
- args->len, XFSA_FIXUP_BNO_OK);
+ args->len, XFSA_FIXUP_BNO_OK, args->isfl); /* NOTE(review): isfl presumably the freelist-allocation flag — confirm */
if (error) {
xfs_btree_del_cursor(cnt_cur, XFS_BTREE_ERROR);
goto error0;
@@ -1028,7 +1053,7 @@ restart:
* Fix up the btree entries.
*/
if ((error = xfs_alloc_fixup_trees(cnt_cur, bno_cur_lt, ltbno,
- ltlen, bnew, blen, XFSA_FIXUP_CNT_OK)))
+ ltlen, bnew, blen, XFSA_FIXUP_CNT_OK, args->isfl))) /* forward freelist flag for debug trace */
goto error0;
xfs_btree_del_cursor(cnt_cur, XFS_BTREE_NOERROR);
xfs_btree_del_cursor(bno_cur_lt, XFS_BTREE_NOERROR);
@@ -1219,7 +1244,7 @@ restart:
args->agbno = ltnew;
if ((error = xfs_alloc_fixup_trees(cnt_cur, bno_cur_lt, ltbno, ltlen,
- ltnew, rlen, XFSA_FIXUP_BNO_OK)))
+ ltnew, rlen, XFSA_FIXUP_BNO_OK, args->isfl))) /* forward freelist flag for debug trace */
goto error0;
if (j)
@@ -1420,7 +1445,7 @@ restart:
bno_cur = xfs_allocbt_init_cursor(args->mp, args->tp, args->agbp,
args->agno, XFS_BTNUM_BNO);
if ((error = xfs_alloc_fixup_trees(cnt_cur, bno_cur, fbno, flen,
- rbno, rlen, XFSA_FIXUP_CNT_OK)))
+ rbno, rlen, XFSA_FIXUP_CNT_OK, args->isfl))) /* forward freelist flag for debug trace */
goto error0;
xfs_btree_del_cursor(cnt_cur, XFS_BTREE_NOERROR);
xfs_btree_del_cursor(bno_cur, XFS_BTREE_NOERROR);