author    Ruifeng Wang <ruifeng.wang@arm.com>          2021-01-14 06:59:22 +0000
committer David Marchand <david.marchand@redhat.com>   2021-01-14 14:19:57 +0100
commit    5702b7bf1ce509d2f40d732ba4e2af5b1269e0d8 (patch)
tree      7a273cb54e17204cdc3d6fc1fe887477b49471f1 /lib
parent    6e4d4a6381c47cfe6d75225945b3f6a5f8453214 (diff)
lpm: fix vector IPv4 lookup
rte_lpm_lookupx4 could return a wrong next hop when more than 256 tbl8
groups are created. This is caused by an incorrect type cast of the tbl8
group index stored in the tbl24 entry: the cast to uint8_t truncates the
group index, so the wrong tbl8 group is searched.

Fixed by applying the proper mask to the tbl24 entry to extract the full
tbl8 group index.

Fixes: dc81ebbacaeb ("lpm: extend IPv4 next hop field")
Fixes: cbc2f1dccfba ("lpm/arm: support NEON")
Fixes: d2cc7959342b ("lpm: add AltiVec for ppc64")
Cc: stable@dpdk.org

Signed-off-by: Ruifeng Wang <ruifeng.wang@arm.com>
Tested-by: David Christensen <drc@linux.vnet.ibm.com>
Acked-by: Vladimir Medvedkin <vladimir.medvedkin@intel.com>
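For illustration, a minimal standalone sketch of the truncation (not DPDK
code: the flag byte 0x83 and group index 300 are made-up values; the group
index occupying the low 24 bits of the entry, and
RTE_LPM_TBL8_GROUP_NUM_ENTRIES being 256, match the patched code above):

	#include <stdint.h>
	#include <stdio.h>

	/* Stands in for RTE_LPM_TBL8_GROUP_NUM_ENTRIES (256 in DPDK). */
	#define TBL8_GROUP_NUM_ENTRIES 256u

	int main(void)
	{
		/* Hypothetical tbl24 entry: flag bits in the top byte, tbl8
		 * group index 300 in the low 24 bits. An index this large only
		 * occurs once more than 256 tbl8 groups exist. */
		uint32_t entry = (0x83u << 24) | 300u;

		/* Old code: the cast keeps only the low 8 bits, 300 & 0xFF == 44. */
		uint32_t bad  = (uint8_t)entry * TBL8_GROUP_NUM_ENTRIES;
		/* Patched code: the mask keeps the full 24-bit group index. */
		uint32_t good = (entry & 0x00FFFFFFu) * TBL8_GROUP_NUM_ENTRIES;

		printf("truncated: group %u -> tbl8 offset %u\n",
				(unsigned)(uint8_t)entry, (unsigned)bad);
		printf("masked:    group %u -> tbl8 offset %u\n",
				(unsigned)(entry & 0x00FFFFFFu), (unsigned)good);
		return 0;
	}

The truncated index lands in tbl8 group 44 instead of group 300, which is
exactly the wrong-next-hop symptom described above.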
Diffstat (limited to 'lib')
-rw-r--r--  lib/librte_lpm/rte_lpm_altivec.h |  8 ++++----
-rw-r--r--  lib/librte_lpm/rte_lpm_neon.h    |  8 ++++----
-rw-r--r--  lib/librte_lpm/rte_lpm_sse.h     |  8 ++++----
3 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/lib/librte_lpm/rte_lpm_altivec.h b/lib/librte_lpm/rte_lpm_altivec.h
index 228c41b38e..4fbc1b595d 100644
--- a/lib/librte_lpm/rte_lpm_altivec.h
+++ b/lib/librte_lpm/rte_lpm_altivec.h
@@ -88,28 +88,28 @@ rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
i8.u32[0] = i8.u32[0] +
- (uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+ (tbl[0] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
tbl[0] = *ptbl;
}
if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
i8.u32[1] = i8.u32[1] +
- (uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+ (tbl[1] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
tbl[1] = *ptbl;
}
if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
i8.u32[2] = i8.u32[2] +
- (uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+ (tbl[2] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
tbl[2] = *ptbl;
}
if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
i8.u32[3] = i8.u32[3] +
- (uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+ (tbl[3] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
tbl[3] = *ptbl;
}
diff --git a/lib/librte_lpm/rte_lpm_neon.h b/lib/librte_lpm/rte_lpm_neon.h
index 6c131d3125..4642a866f1 100644
--- a/lib/librte_lpm/rte_lpm_neon.h
+++ b/lib/librte_lpm/rte_lpm_neon.h
@@ -81,28 +81,28 @@ rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
i8.u32[0] = i8.u32[0] +
- (uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+ (tbl[0] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
tbl[0] = *ptbl;
}
if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
i8.u32[1] = i8.u32[1] +
- (uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+ (tbl[1] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
tbl[1] = *ptbl;
}
if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
i8.u32[2] = i8.u32[2] +
- (uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+ (tbl[2] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
tbl[2] = *ptbl;
}
if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
i8.u32[3] = i8.u32[3] +
- (uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+ (tbl[3] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
tbl[3] = *ptbl;
}
diff --git a/lib/librte_lpm/rte_lpm_sse.h b/lib/librte_lpm/rte_lpm_sse.h
index 44770b6ff8..eaa863c522 100644
--- a/lib/librte_lpm/rte_lpm_sse.h
+++ b/lib/librte_lpm/rte_lpm_sse.h
@@ -82,28 +82,28 @@ rte_lpm_lookupx4(const struct rte_lpm *lpm, xmm_t ip, uint32_t hop[4],
if (unlikely((pt & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
i8.u32[0] = i8.u32[0] +
- (uint8_t)tbl[0] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+ (tbl[0] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[0]];
tbl[0] = *ptbl;
}
if (unlikely((pt >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
i8.u32[1] = i8.u32[1] +
- (uint8_t)tbl[1] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+ (tbl[1] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[1]];
tbl[1] = *ptbl;
}
if (unlikely((pt2 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
i8.u32[2] = i8.u32[2] +
- (uint8_t)tbl[2] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+ (tbl[2] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[2]];
tbl[2] = *ptbl;
}
if (unlikely((pt2 >> 32 & RTE_LPM_VALID_EXT_ENTRY_BITMASK) ==
RTE_LPM_VALID_EXT_ENTRY_BITMASK)) {
i8.u32[3] = i8.u32[3] +
- (uint8_t)tbl[3] * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
+ (tbl[3] & 0x00FFFFFF) * RTE_LPM_TBL8_GROUP_NUM_ENTRIES;
ptbl = (const uint32_t *)&lpm->tbl8[i8.u32[3]];
tbl[3] = *ptbl;
}