lpm: fix vector IPv4 lookup
[ upstream commit 5702b7b ]

rte_lpm_lookupx4 could return a wrong next hop when more than 256 tbl8
groups are created. This is caused by incorrect type casting of the tbl8
group index stored in the tbl24 entry. The cast truncated the group
index, and hence the wrong tbl8 group was searched.

The issue is fixed by applying a proper mask to the tbl24 entry to get
the tbl8 group index.
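As a hedged illustration (standalone C, not DPDK code), the snippet below shows how the (uint8_t) cast loses any group index above 255, while the 0x00FFFFFF mask from the fix keeps the full 24-bit index. The placement of the flag bits in the top byte is an assumption for demonstration, suggested by the masks in the diff:

/*
 * Minimal sketch of the truncation. Assumes the low 24 bits of a tbl24
 * entry hold the tbl8 group index and the top byte holds flag bits;
 * the values used here are illustrative.
 */
#include <stdint.h>
#include <stdio.h>

#define TBL8_GROUP_NUM_ENTRIES 256	/* stands in for RTE_LPM_TBL8_GROUP_NUM_ENTRIES */

int main(void)
{
	uint32_t entry = 0x03000000u | 300u;	/* extended entry, tbl8 group 300 */

	/* Buggy: the cast keeps only the low 8 bits, so group 300 becomes 44. */
	uint32_t bad  = (uint8_t)entry * TBL8_GROUP_NUM_ENTRIES;

	/* Fixed: mask off the flag bits but keep all 24 index bits. */
	uint32_t good = (entry & 0x00FFFFFF) * TBL8_GROUP_NUM_ENTRIES;

	printf("buggy base %u, fixed base %u\n", (unsigned)bad, (unsigned)good);
	return 0;	/* prints: buggy base 11264, fixed base 76800 */
}

A group index above 255 no longer fits in a uint8_t, which is why the bug only surfaces once more than 256 tbl8 groups exist.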

Fixes: dc81ebb ("lpm: extend IPv4 next hop field")
Fixes: cbc2f1d ("lpm/arm: support NEON")
Fixes: d2cc795 ("lpm: add AltiVec for ppc64")

Signed-off-by: Ruifeng Wang <ruifeng.wang@arm.com>
Tested-by: David Christensen <drc@linux.vnet.ibm.com>
Acked-by: Vladimir Medvedkin <vladimir.medvedkin@intel.com>
Reyfone authored and bluca committed Feb 2, 2021
1 parent a0e0fe9 commit c9a4a0d
Showing 3 changed files with 12 additions and 12 deletions.
8 changes: 4 additions & 4 deletions lib/librte_lpm/rte_lpm_altivec.h
@@ -88,28 +88,28 @@ rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
 	if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[0] = i8.u32[0] +
-			(uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[0] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
 		tbl[0] = *ptbl;
 	}
 	if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[1] = i8.u32[1] +
-			(uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[1] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
 		tbl[1] = *ptbl;
 	}
 	if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[2] = i8.u32[2] +
-			(uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[2] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
 		tbl[2] = *ptbl;
 	}
 	if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[3] = i8.u32[3] +
-			(uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[3] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
 		tbl[3] = *ptbl;
 	}
8 changes: 4 additions & 4 deletions lib/librte_lpm/rte_lpm_neon.h
@@ -81,28 +81,28 @@ rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
 	if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[0] = i8.u32[0] +
-			(uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[0] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
 		tbl[0] = *ptbl;
 	}
 	if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[1] = i8.u32[1] +
-			(uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[1] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
 		tbl[1] = *ptbl;
 	}
 	if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[2] = i8.u32[2] +
-			(uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[2] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
 		tbl[2] = *ptbl;
 	}
 	if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[3] = i8.u32[3] +
-			(uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[3] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
 		tbl[3] = *ptbl;
 	}
8 changes: 4 additions & 4 deletions lib/librte_lpm/rte_lpm_sse.h
@@ -82,28 +82,28 @@ rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
 	if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[0] = i8.u32[0] +
-			(uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[0] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
 		tbl[0] = *ptbl;
 	}
 	if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[1] = i8.u32[1] +
-			(uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[1] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
 		tbl[1] = *ptbl;
 	}
 	if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[2] = i8.u32[2] +
-			(uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[2] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
 		tbl[2] = *ptbl;
 	}
 	if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
 			RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
 		i8.u32[3] = i8.u32[3] +
-			(uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+			(tbl[3] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
 		ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
 		tbl[3] = *ptbl;
 	}
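All three files patch the same four-lane fallback path, so the hunks are identical apart from the lane index. For orientation, here is a minimal scalar sketch of what one lane computes after the fix. The helper name is hypothetical, not DPDK API; it assumes that i8 holds the lowest byte of that lane's IP (as the elided setup code in rte_lpm_lookupx4 suggests) and that the valid/extended flag bits sit in the top byte of the entry:

#include <stdint.h>

#define TBL8_GROUP_NUM_ENTRIES	256		/* stands in for RTE_LPM_TBL8_GROUP_NUM_ENTRIES */
#define VALID_EXT_ENTRY_BITMASK	0x03000000u	/* assumed value, mirrors RTE_LPM_VALID_EXT_ENTRY_BITMASK */

/*
 * Scalar equivalent of one vector lane: when the tbl24 entry is an
 * extended entry, its low 24 bits select a tbl8 group, and the lowest
 * byte of the IP selects the entry inside that 256-entry group;
 * otherwise the tbl24 entry itself carries the result.
 */
uint32_t
lookup_one_lane(const uint32_t *tbl8, uint32_t tbl24_entry, uint8_t ip_lo)
{
	if ((tbl24_entry & VALID_EXT_ENTRY_BITMASK) == VALID_EXT_ENTRY_BITMASK)
		return tbl8[(tbl24_entry & 0x00FFFFFF) *
				TBL8_GROUP_NUM_ENTRIES + ip_lo];
	return tbl24_entry;
}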
