VOL-3419: OpenOLT adapter at scale constantly takes more than 10 seconds to react to flows
- Pass information to agent to do the flow replication
- Consolidate various locks in the adapter and remove redundant locks
- use voltha-proto version 4.0.2 and voltha-lib-go version 4.0.0
- Bump adapter version to 3.0.0
Change-Id: Ic053c54e5319bb1736ec74facfc79dd10058ecf5
diff --git a/internal/pkg/core/openolt_flowmgr_test.go b/internal/pkg/core/openolt_flowmgr_test.go
index b75bfac..9464830 100644
--- a/internal/pkg/core/openolt_flowmgr_test.go
+++ b/internal/pkg/core/openolt_flowmgr_test.go
@@ -27,19 +27,19 @@
"testing"
"time"
- "github.com/opencord/voltha-protos/v3/go/voltha"
+ "github.com/opencord/voltha-protos/v4/go/voltha"
- "github.com/opencord/voltha-lib-go/v3/pkg/db"
- fu "github.com/opencord/voltha-lib-go/v3/pkg/flows"
- "github.com/opencord/voltha-lib-go/v3/pkg/log"
- tp "github.com/opencord/voltha-lib-go/v3/pkg/techprofile"
+ "github.com/opencord/voltha-lib-go/v4/pkg/db"
+ fu "github.com/opencord/voltha-lib-go/v4/pkg/flows"
+ "github.com/opencord/voltha-lib-go/v4/pkg/log"
+ tp "github.com/opencord/voltha-lib-go/v4/pkg/techprofile"
"github.com/opencord/voltha-openolt-adapter/internal/pkg/resourcemanager"
rsrcMgr "github.com/opencord/voltha-openolt-adapter/internal/pkg/resourcemanager"
"github.com/opencord/voltha-openolt-adapter/pkg/mocks"
- ofp "github.com/opencord/voltha-protos/v3/go/openflow_13"
- "github.com/opencord/voltha-protos/v3/go/openolt"
- openoltpb2 "github.com/opencord/voltha-protos/v3/go/openolt"
- tp_pb "github.com/opencord/voltha-protos/v3/go/tech_profile"
+ ofp "github.com/opencord/voltha-protos/v4/go/openflow_13"
+ "github.com/opencord/voltha-protos/v4/go/openolt"
+ openoltpb2 "github.com/opencord/voltha-protos/v4/go/openolt"
+ tp_pb "github.com/opencord/voltha-protos/v4/go/tech_profile"
)
var flowMgr []*OpenOltFlowMgr
@@ -604,7 +604,7 @@
// clean the flowMgr
for i := 0; i < intfNum; i++ {
- flowMgr[i].onuGemInfo = make(map[uint32][]rsrcMgr.OnuGemInfo, intfNum)
+ flowMgr[i].onuGemInfo = make([]rsrcMgr.OnuGemInfo, 0)
}
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
@@ -637,13 +637,13 @@
// check that each entry of onuGemInfo has the correct number of ONUs
for i := 0; i < intfNum; i++ {
- lenofOnu := len(flowMgr[i].onuGemInfo[uint32(i)])
+ lenofOnu := len(flowMgr[i].onuGemInfo)
if onuNum != lenofOnu {
t.Errorf("OnuGemInfo length is not as expected len = %d, want %d", lenofOnu, onuNum)
}
for o := 1; o <= onuNum; o++ {
- lenOfGemPorts := len(flowMgr[i].onuGemInfo[uint32(i)][o-1].GemPorts)
+ lenOfGemPorts := len(flowMgr[i].onuGemInfo[o-1].GemPorts)
// check that each onuEntry has 1 gemPort
if lenOfGemPorts != 1 {
t.Errorf("Expected 1 GemPort per ONU, found %d", lenOfGemPorts)
@@ -651,7 +651,7 @@
// check that the value of the gemport is correct
gemID, _ := strconv.Atoi(fmt.Sprintf("90%d%d", i, o-1))
- currentValue := flowMgr[i].onuGemInfo[uint32(i)][o-1].GemPorts[0]
+ currentValue := flowMgr[i].onuGemInfo[o-1].GemPorts[0]
if uint32(gemID) != currentValue {
t.Errorf("Expected GemPort value to be %d, found %d", gemID, currentValue)
}
@@ -698,11 +698,11 @@
for _, gemPortDeleted := range tt.args.gemPortIDsToBeDeleted {
flowMgr[tt.args.intfID].deleteGemPortFromLocalCache(ctx, tt.args.intfID, tt.args.onuID, gemPortDeleted)
}
- lenofGemPorts := len(flowMgr[tt.args.intfID].onuGemInfo[tt.args.intfID][0].GemPorts)
+ lenofGemPorts := len(flowMgr[tt.args.intfID].onuGemInfo[0].GemPorts)
if lenofGemPorts != tt.args.finalLength {
t.Errorf("GemPorts length is not as expected len = %d, want %d", lenofGemPorts, tt.args.finalLength)
}
- gemPorts := flowMgr[tt.args.intfID].onuGemInfo[tt.args.intfID][0].GemPorts
+ gemPorts := flowMgr[tt.args.intfID].onuGemInfo[0].GemPorts
if !reflect.DeepEqual(tt.args.gemPortIDsRemaining, gemPorts) {
t.Errorf("GemPorts are not as expected = %v, want %v", gemPorts, tt.args.gemPortIDsRemaining)
}
@@ -964,11 +964,6 @@
return
}
- //ofpMeterConfig := &ofp.OfpMeterConfig{Flags: 1, MeterId: 1}
- //flowMetadata := &voltha.FlowMetadata{
- // Meters: []*ofp.OfpMeterConfig{ofpMeterConfig},
- //}
-
TpInst := &tp.TechProfile{
Name: "Test-Tech-Profile",
SubscriberIdentifier: "257",
@@ -980,6 +975,39 @@
Uni: "16",
},
}
+ TpInst.UsScheduler.Priority = 1
+ TpInst.UsScheduler.Direction = "upstream"
+ TpInst.UsScheduler.AllocID = 1
+ TpInst.UsScheduler.AdditionalBw = "None"
+ TpInst.UsScheduler.QSchedPolicy = "PQ"
+ TpInst.UsScheduler.Weight = 4
+
+ TpInst.DsScheduler.Priority = 1
+ TpInst.DsScheduler.Direction = "downstream"
+ TpInst.DsScheduler.AllocID = 1
+ TpInst.DsScheduler.AdditionalBw = "None"
+ TpInst.DsScheduler.QSchedPolicy = "PQ"
+ TpInst.DsScheduler.Weight = 4
+
+ TpInst.UpstreamGemPortAttributeList = make([]tp.IGemPortAttribute, 4)
+ TpInst.UpstreamGemPortAttributeList[0].GemportID = 1
+ TpInst.UpstreamGemPortAttributeList[0].PbitMap = "0b00000011"
+ TpInst.UpstreamGemPortAttributeList[1].GemportID = 2
+ TpInst.UpstreamGemPortAttributeList[1].PbitMap = "0b00001100"
+ TpInst.UpstreamGemPortAttributeList[2].GemportID = 3
+ TpInst.UpstreamGemPortAttributeList[2].PbitMap = "0b00110000"
+ TpInst.UpstreamGemPortAttributeList[3].GemportID = 4
+ TpInst.UpstreamGemPortAttributeList[3].PbitMap = "0b11000000"
+
+ TpInst.DownstreamGemPortAttributeList = make([]tp.IGemPortAttribute, 4)
+ TpInst.DownstreamGemPortAttributeList[0].GemportID = 1
+ TpInst.DownstreamGemPortAttributeList[0].PbitMap = "0b00000011"
+ TpInst.DownstreamGemPortAttributeList[1].GemportID = 2
+ TpInst.DownstreamGemPortAttributeList[1].PbitMap = "0b00001100"
+ TpInst.DownstreamGemPortAttributeList[2].GemportID = 3
+ TpInst.DownstreamGemPortAttributeList[2].PbitMap = "0b00110000"
+ TpInst.DownstreamGemPortAttributeList[3].GemportID = 4
+ TpInst.DownstreamGemPortAttributeList[3].PbitMap = "0b11000000"
type args struct {
args map[string]uint32
@@ -1014,7 +1042,7 @@
uniID: 16,
portNo: 1,
TpInst: TpInst,
- allocID: []uint32{0x8001, 0x8002, 0x8003, 0x8004},
+ allocID: []uint32{0x8001},
gemPorts: []uint32{1, 2, 3, 4},
TpID: 64,
uni: "16",
@@ -1033,7 +1061,7 @@
uniID: 16,
portNo: 1,
TpInst: TpInst,
- allocID: []uint32{0x8001, 0x8002, 0x8003, 0x8004},
+ allocID: []uint32{0x8001},
gemPorts: []uint32{1, 2, 3, 4},
TpID: 64,
uni: "16",
@@ -1052,7 +1080,7 @@
uniID: 16,
portNo: 1,
TpInst: TpInst,
- allocID: []uint32{0x8001, 0x8002, 0x8003, 0x8004},
+ allocID: []uint32{0x8001},
gemPorts: []uint32{1, 2, 3, 4},
TpID: 64,
uni: "16",
@@ -1071,7 +1099,7 @@
uniID: 16,
portNo: 1,
TpInst: TpInst,
- allocID: []uint32{0x8001, 0x8002, 0x8003, 0x8004},
+ allocID: []uint32{0x8001},
gemPorts: []uint32{1, 2, 3, 4},
TpID: 64,
uni: "16",