@@ -440,7 +440,9 @@ fn test_unsubscribe() {

    for topic_hash in &topic_hashes {
        assert!(
-            gs.topic_peers.contains_key(topic_hash),
+            gs.connected_peers
+                .values()
+                .any(|p| p.topics.contains(topic_hash)),
            "Topic_peers contain a topic entry"
        );
        assert!(
@@ -629,8 +631,11 @@ fn test_publish_without_flood_publishing() {

    // all peers should be subscribed to the topic
    assert_eq!(
-        gs.topic_peers.get(&topic_hashes[0]).map(|p| p.len()),
-        Some(20),
+        gs.connected_peers
+            .values()
+            .filter(|p| p.topics.contains(&topic_hashes[0]))
+            .count(),
+        20,
        "Peers should be subscribed to the topic"
    );

@@ -669,8 +674,8 @@ fn test_publish_without_flood_publishing() {
    let config: Config = Config::default();
    assert_eq!(
        publishes.len(),
-        config.mesh_n_low(),
-        "Should send a publish message to all known peers"
+        config.mesh_n(),
+        "Should send a publish message to at least mesh_n peers"
    );

    assert!(
@@ -809,9 +814,9 @@ fn test_inject_connected() {

    // should add the new peers to `peer_topics` with an empty vec as a gossipsub node
    for peer in peers {
-        let known_topics = gs.peer_topics.get(&peer).unwrap();
+        let peer = gs.connected_peers.get(&peer).unwrap();
        assert!(
-            known_topics == &topic_hashes.iter().cloned().collect(),
+            peer.topics == topic_hashes.iter().cloned().collect(),
            "The topics for each node should all topics"
        );
    }
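
The assertions in this and the following hunks read topic membership straight from `gs.connected_peers` instead of the removed `gs.peer_topics` / `gs.topic_peers` maps. As a point of reference, here is a minimal, hypothetical sketch of the per-peer entry they rely on; the field names are taken from the diff, but the real struct lives inside the gossipsub crate and may carry additional fields:

```rust
use std::collections::BTreeSet;

// Hypothetical stand-ins for the libp2p types named in the diff; the actual
// definitions live in libp2p-gossipsub / libp2p-swarm.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
struct PeerId(u64);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
struct TopicHash(String);
struct ConnectionId(usize);
enum PeerKind {
    Gossipsubv1_1,
}

// Shape assumed by the updated tests: the set of subscribed topics now hangs
// off each connected peer directly, so "peers for topic X" becomes a filter
// over this map rather than a lookup in a separate topic_peers index.
struct PeerConnections {
    kind: PeerKind,
    connections: Vec<ConnectionId>,
    topics: BTreeSet<TopicHash>,
}
```
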
@@ -860,24 +865,39 @@ fn test_handle_received_subscriptions() {

    // verify the result

-    let peer_topics = gs.peer_topics.get(&peers[0]).unwrap().clone();
+    let peer = gs.connected_peers.get(&peers[0]).unwrap();
    assert!(
-        peer_topics == topic_hashes.iter().take(3).cloned().collect(),
+        peer.topics
+            == topic_hashes
+                .iter()
+                .take(3)
+                .cloned()
+                .collect::<BTreeSet<_>>(),
        "First peer should be subscribed to three topics"
    );
-    let peer_topics = gs.peer_topics.get(&peers[1]).unwrap().clone();
+    let peer1 = gs.connected_peers.get(&peers[1]).unwrap();
    assert!(
-        peer_topics == topic_hashes.iter().take(3).cloned().collect(),
+        peer1.topics
+            == topic_hashes
+                .iter()
+                .take(3)
+                .cloned()
+                .collect::<BTreeSet<_>>(),
        "Second peer should be subscribed to three topics"
    );

    assert!(
-        !gs.peer_topics.contains_key(&unknown_peer),
+        !gs.connected_peers.contains_key(&unknown_peer),
        "Unknown peer should not have been added"
    );

    for topic_hash in topic_hashes[..3].iter() {
-        let topic_peers = gs.topic_peers.get(topic_hash).unwrap().clone();
+        let topic_peers = gs
+            .connected_peers
+            .iter()
+            .filter(|(_, p)| p.topics.contains(topic_hash))
+            .map(|(peer_id, _)| *peer_id)
+            .collect::<BTreeSet<PeerId>>();
        assert!(
            topic_peers == peers[..2].iter().cloned().collect(),
            "Two peers should be added to the first three topics"
@@ -894,13 +914,21 @@ fn test_handle_received_subscriptions() {
        &peers[0],
    );

-    let peer_topics = gs.peer_topics.get(&peers[0]).unwrap().clone();
-    assert!(
-        peer_topics == topic_hashes[1..3].iter().cloned().collect(),
+    let peer = gs.connected_peers.get(&peers[0]).unwrap().clone();
+    assert_eq!(
+        peer.topics,
+        topic_hashes[1..3].iter().cloned().collect::<BTreeSet<_>>(),
        "Peer should be subscribed to two topics"
    );

-    let topic_peers = gs.topic_peers.get(&topic_hashes[0]).unwrap().clone(); // only gossipsub at the moment
+    // only gossipsub at the moment
+    let topic_peers = gs
+        .connected_peers
+        .iter()
+        .filter(|(_, p)| p.topics.contains(&topic_hashes[0]))
+        .map(|(peer_id, _)| *peer_id)
+        .collect::<BTreeSet<PeerId>>();
+
    assert!(
        topic_peers == peers[1..2].iter().cloned().collect(),
        "Only the second peers should be in the first topic"
@@ -924,9 +952,8 @@ fn test_get_random_peers() {
    for _ in 0..20 {
        peers.push(PeerId::random())
    }
-
-    gs.topic_peers
-        .insert(topic_hash.clone(), peers.iter().cloned().collect());
+    let mut topics = BTreeSet::new();
+    topics.insert(topic_hash.clone());

    gs.connected_peers = peers
        .iter()
@@ -936,52 +963,32 @@ fn test_get_random_peers() {
                PeerConnections {
                    kind: PeerKind::Gossipsubv1_1,
                    connections: vec![ConnectionId::new_unchecked(0)],
+                    topics: topics.clone(),
                },
            )
        })
        .collect();

-    let random_peers =
-        get_random_peers(&gs.topic_peers, &gs.connected_peers, &topic_hash, 5, |_| {
-            true
-        });
+    let random_peers = get_random_peers(&gs.connected_peers, &topic_hash, 5, |_| true);
    assert_eq!(random_peers.len(), 5, "Expected 5 peers to be returned");
-    let random_peers = get_random_peers(
-        &gs.topic_peers,
-        &gs.connected_peers,
-        &topic_hash,
-        30,
-        |_| true,
-    );
+    let random_peers = get_random_peers(&gs.connected_peers, &topic_hash, 30, |_| true);
    assert!(random_peers.len() == 20, "Expected 20 peers to be returned");
    assert!(
        random_peers == peers.iter().cloned().collect(),
        "Expected no shuffling"
    );
-    let random_peers = get_random_peers(
-        &gs.topic_peers,
-        &gs.connected_peers,
-        &topic_hash,
-        20,
-        |_| true,
-    );
+    let random_peers = get_random_peers(&gs.connected_peers, &topic_hash, 20, |_| true);
    assert!(random_peers.len() == 20, "Expected 20 peers to be returned");
    assert!(
        random_peers == peers.iter().cloned().collect(),
        "Expected no shuffling"
    );
-    let random_peers =
-        get_random_peers(&gs.topic_peers, &gs.connected_peers, &topic_hash, 0, |_| {
-            true
-        });
+    let random_peers = get_random_peers(&gs.connected_peers, &topic_hash, 0, |_| true);
    assert!(random_peers.is_empty(), "Expected 0 peers to be returned");
    // test the filter
-    let random_peers =
-        get_random_peers(&gs.topic_peers, &gs.connected_peers, &topic_hash, 5, |_| {
-            false
-        });
+    let random_peers = get_random_peers(&gs.connected_peers, &topic_hash, 5, |_| false);
    assert!(random_peers.is_empty(), "Expected 0 peers to be returned");
-    let random_peers = get_random_peers(&gs.topic_peers, &gs.connected_peers, &topic_hash, 10, {
+    let random_peers = get_random_peers(&gs.connected_peers, &topic_hash, 10, {
        |peer| peers.contains(peer)
    });
    assert!(random_peers.len() == 10, "Expected 10 peers to be returned");
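
The rewritten calls in this hunk assume a `get_random_peers` helper that now takes only the connected-peer map plus the topic, the requested count, and a filter. Below is a rough sketch of such a helper, inferred purely from the call sites and assertions above, reusing the stand-in `PeerId` / `TopicHash` / `PeerConnections` types sketched after the `test_inject_connected` hunk and the `rand` crate; the crate's real implementation may differ, for example by also filtering on peer kind:

```rust
use rand::seq::SliceRandom;
use std::collections::{BTreeSet, HashMap};

// Sketch: pick up to `n` random peers that are subscribed to `topic_hash`
// and accepted by the filter `f`, working directly off the connected-peer
// map instead of a separate topic_peers index.
fn get_random_peers(
    connected_peers: &HashMap<PeerId, PeerConnections>,
    topic_hash: &TopicHash,
    n: usize,
    mut f: impl FnMut(&PeerId) -> bool,
) -> BTreeSet<PeerId> {
    let mut candidates: Vec<PeerId> = connected_peers
        .iter()
        .filter(|(_, p)| p.topics.contains(topic_hash))
        .map(|(peer_id, _)| *peer_id)
        .filter(|peer_id| f(peer_id))
        .collect();
    if candidates.len() <= n {
        // Fewer candidates than requested: return them all, unshuffled.
        // This is why the assertions above expect "no shuffling" once `n`
        // reaches the full peer count.
        return candidates.into_iter().collect();
    }
    candidates.shuffle(&mut rand::thread_rng());
    candidates.truncate(n);
    candidates.into_iter().collect()
}
```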