
Commit 26315fb

fix yuma3 inactive neurons bonds computation

1 parent 8c2eb91

4 files changed: +144 -3 lines

pallets/subtensor/src/epoch/math.rs (16 additions, 0 deletions)

@@ -565,6 +565,22 @@ pub fn inplace_mask_rows(mask: &[bool], matrix: &mut [Vec<I32F32>]) {
         });
 }
 
+// Apply row mask to sparse matrix, mask=true will set the values on that row to 0.
+#[allow(dead_code)]
+pub fn inplace_mask_rows_sparse(mask: &[bool], sparse_matrix: &mut [Vec<(u16, I32F32)>]) {
+    assert_eq!(sparse_matrix.len(), mask.len());
+    sparse_matrix
+        .iter_mut()
+        .zip(mask)
+        .for_each(|(sparse_row, mask_row)| {
+            if *mask_row {
+                sparse_row.iter_mut().for_each(|(_j, value)| {
+                    *value = I32F32::saturating_from_num(0);
+                });
+            }
+        });
+}
+
 // Apply column mask to matrix, mask=true will mask out, i.e. set to 0.
 // Assumes each column has the same length.
 #[allow(dead_code)]
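For reference, a minimal standalone sketch of the new helper's semantics, not pallet code: it uses plain f32 in place of the pallet's I32F32 fixed-point type so it compiles without the pallet's dependencies, and the name mask_rows_sparse is only illustrative. Masked rows keep their column indices but have their values zeroed in place; unmasked rows are untouched.

// Standalone sketch (illustrative only): rows are Vec<(col_index, value)> pairs,
// mirroring the sparse-matrix layout in epoch/math.rs, but with f32 values.
fn mask_rows_sparse(mask: &[bool], sparse_matrix: &mut [Vec<(u16, f32)>]) {
    assert_eq!(sparse_matrix.len(), mask.len());
    for (row, &masked) in sparse_matrix.iter_mut().zip(mask) {
        if masked {
            for (_col, value) in row.iter_mut() {
                *value = 0.0; // zero the stored value; the column index stays
            }
        }
    }
}

fn main() {
    // Row 0 belongs to an inactive validator, row 1 to an active one.
    let mut weights = vec![
        vec![(2u16, 0.7f32), (3u16, 0.3)],
        vec![(2u16, 1.0)],
    ];
    mask_rows_sparse(&[true, false], &mut weights);
    assert_eq!(weights[0], vec![(2, 0.0), (3, 0.0)]); // zeroed, indices preserved
    assert_eq!(weights[1], vec![(2, 1.0)]);           // untouched
}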

pallets/subtensor/src/epoch/run_epoch.rs (17 additions, 3 deletions)

@@ -212,7 +212,7 @@ impl<T: Config> Pallet<T> {
         // Calculate weights for bonds, apply bonds penalty to weights.
         // bonds_penalty = 0: weights_for_bonds = weights.clone()
         // bonds_penalty = 1: weights_for_bonds = clipped_weights.clone()
-        let weights_for_bonds: Vec<Vec<I32F32>> =
+        let mut weights_for_bonds: Vec<Vec<I32F32>> =
             interpolate(&weights, &clipped_weights, bonds_penalty);
 
         let mut dividends: Vec<I32F32>;
@@ -223,6 +223,13 @@ impl<T: Config> Pallet<T> {
         inplace_mask_cols(&recently_registered, &mut bonds); // mask outdated bonds
         log::trace!("B: {:?}", &bonds);
 
+        // Inactive neurons bonds are computed assuming 0 weights.
+        inplace_mask_rows(&inactive, &mut weights_for_bonds);
+        log::trace!(
+            "Weights for bonds (active neurons): {:?}",
+            &weights_for_bonds
+        );
+
         // Compute the Exponential Moving Average (EMA) of bonds.
         ema_bonds = Self::compute_bonds(netuid, &weights_for_bonds, &bonds, &consensus);
         log::trace!("emaB: {:?}", &ema_bonds);
@@ -628,7 +635,7 @@ impl<T: Config> Pallet<T> {
         // Calculate weights for bonds, apply bonds penalty to weights.
         // bonds_penalty = 0: weights_for_bonds = weights.clone()
         // bonds_penalty = 1: weights_for_bonds = clipped_weights.clone()
-        let weights_for_bonds: Vec<Vec<(u16, I32F32)>> =
+        let mut weights_for_bonds: Vec<Vec<(u16, I32F32)>> =
             interpolate_sparse(&weights, &clipped_weights, n, bonds_penalty);
 
         let mut dividends: Vec<I32F32>;
@@ -650,8 +657,15 @@ impl<T: Config> Pallet<T> {
         );
         log::trace!("Bonds: (mask) {:?}", &bonds);
 
-        // Compute the Exponential Moving Average (EMA) of bonds.
+        // Inactive neurons bonds are computed assuming 0 weights.
         log::trace!("weights_for_bonds: {:?}", &weights_for_bonds);
+        inplace_mask_rows_sparse(&inactive, &mut weights_for_bonds);
+        log::trace!(
+            "Weights for bonds (active neurons): {:?}",
+            &weights_for_bonds
+        );
+
+        // Compute the Exponential Moving Average (EMA) of bonds.
         ema_bonds = Self::compute_bonds_sparse(netuid, &weights_for_bonds, &bonds, &consensus);
         log::trace!("emaB: {:?}", &ema_bonds);
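Why zeroing the inactive rows matters for the EMA: each bond is pulled toward the weight the validator currently expresses, so once an inactive validator's row in weights_for_bonds is forced to 0, its bonds decay toward 0 instead of being propped up by its stale weights. The sketch below is a deliberately simplified model with made-up alpha and starting values; it is not the pallet's compute_bonds / compute_bonds_sparse logic, which takes the consensus vector as an input and derives its own alpha values.

// Simplified, self-contained model of a bonds EMA step (illustrative only).
fn ema_step(bonds: &mut [f64], weights_for_bonds: &[f64], alpha: f64) {
    for (b, w) in bonds.iter_mut().zip(weights_for_bonds) {
        *b += alpha * (*w - *b); // move each bond toward the observed weight
    }
}

fn main() {
    let alpha = 0.1; // hypothetical smoothing factor
    // One bond per validator toward the same server, both starting at 0.5.
    let mut bonds = vec![0.5, 0.5];
    // Validator 0 is inactive, so its weight row was masked to 0;
    // validator 1 is active and still weights the server at 1.0.
    let weights_for_bonds = vec![0.0, 1.0];
    for epoch in 0..3 {
        ema_step(&mut bonds, &weights_for_bonds, alpha);
        println!("epoch {epoch}: bonds = {bonds:?}");
    }
    // The inactive validator's bond decays (0.45, 0.405, 0.3645, ...),
    // while the active validator's bond keeps rising toward 1.0.
}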

pallets/subtensor/src/tests/epoch.rs (69 additions, 0 deletions)

@@ -2755,6 +2755,75 @@ fn set_yuma_3_weights(netuid: NetUid, weights: Vec<Vec<u16>>, indices: Vec<u16>)
     }
 }
 
+#[test]
+fn test_yuma_3_inactive_bonds() {
+    // Test how bonds change over epochs for active vs inactive validators.
+    for sparse in [true, false].iter() {
+        new_test_ext(1).execute_with(|| {
+            let n: u16 = 4; // 2 validators, 2 servers
+            let netuid = NetUid::from(1);
+            let max_stake: u64 = 8;
+            let stakes: Vec<u64> = vec![5, 5, 0, 0];
+            let weights_to_set: Vec<u16> = vec![u16::MAX, 0];
+            let miner_indices: Vec<u16> = vec![2, 3];
+
+            setup_yuma_3_scenario(netuid, n, *sparse, max_stake, stakes);
+
+            // At epoch 4 a validator will go inactive if it has not set weights.
+            SubtensorModule::set_activity_cutoff(netuid, 3);
+
+            // Set initial weights.
+            set_yuma_3_weights(
+                netuid,
+                vec![weights_to_set.clone(); 2],
+                miner_indices.clone(),
+            );
+
+            let all_targets_bonds = [
+                vec![vec![0.101319, 0.0000], vec![0.101319, 0.0000]],
+                vec![vec![0.192370, 0.0000], vec![0.192370, 0.0000]],
+                vec![vec![0.274204, 0.0000], vec![0.274204, 0.0000]],
+                vec![vec![0.241580, 0.0000], vec![0.347737, 0.0000]],
+                vec![vec![0.214023, 0.0000], vec![0.413824, 0.0000]],
+                vec![vec![0.293659, 0.0000], vec![0.473212, 0.0000]],
+                vec![vec![0.365224, 0.0000], vec![0.526588, 0.0000]],
+                vec![vec![0.429541, 0.0000], vec![0.574547, 0.0000]],
+            ];
+
+            for (epoch, target_bonds) in all_targets_bonds.iter().enumerate() {
+                if epoch == 2 {
+                    // Set weights only on validator 1 and let the other become inactive.
+                    assert_ok!(SubtensorModule::set_weights(
+                        RuntimeOrigin::signed(U256::from(1)),
+                        netuid,
+                        miner_indices.clone(),
+                        weights_to_set.clone(),
+                        0
+                    ));
+                }
+                if epoch == 5 {
+                    // Both validators are active again.
+                    set_yuma_3_weights(
+                        netuid,
+                        vec![weights_to_set.clone(); 2],
+                        miner_indices.clone(),
+                    );
+                }
+                run_epoch(netuid, *sparse);
+
+                // Check bonds values.
+                let bonds = SubtensorModule::get_bonds_fixed_proportion(netuid);
+                for (bond, target_bond) in bonds.iter().zip(target_bonds.iter()) {
+                    // Skip the 2 validators' zero-valued bonds.
+                    for (b, t) in bond.iter().skip(2).zip(target_bond) {
+                        assert_approx_eq(*b, fixed(*t), I32F32::from_num(1e-3));
+                    }
+                }
+            }
+        });
+    }
+}
+
 #[test]
 fn test_yuma_3_kappa_moves_first() {
     for sparse in [true, false].iter() {

pallets/subtensor/src/tests/math.rs (42 additions, 0 deletions)

@@ -1027,6 +1027,48 @@ fn test_math_inplace_mask_rows() {
     );
 }
 
+#[test]
+fn test_math_inplace_mask_rows_sparse() {
+    let input: Vec<f32> = vec![1., 2., 3., 4., 5., 6., 7., 8., 9.];
+    let mask: Vec<bool> = vec![false, false, false];
+    let target: Vec<f32> = vec![1., 2., 3., 4., 5., 6., 7., 8., 9.];
+    let mut mat = vec_to_sparse_mat_fixed(&input, 3, false);
+    inplace_mask_rows_sparse(&mask, &mut mat);
+    assert_sparse_mat_compare(
+        &mat,
+        &vec_to_sparse_mat_fixed(&target, 3, false),
+        I32F32::from_num(0),
+    );
+    let mask: Vec<bool> = vec![true, true, true];
+    let target: Vec<f32> = vec![0., 0., 0., 0., 0., 0., 0., 0., 0.];
+    let mut mat = vec_to_sparse_mat_fixed(&input, 3, false);
+    inplace_mask_rows_sparse(&mask, &mut mat);
+    assert_sparse_mat_compare(
+        &mat,
+        &vec_to_sparse_mat_fixed(&target, 3, false),
+        I32F32::from_num(0),
+    );
+    let mask: Vec<bool> = vec![true, false, true];
+    let target: Vec<f32> = vec![0., 0., 0., 4., 5., 6., 0., 0., 0.];
+    let mut mat = vec_to_sparse_mat_fixed(&input, 3, false);
+    inplace_mask_rows_sparse(&mask, &mut mat);
+    assert_sparse_mat_compare(
+        &mat,
+        &vec_to_sparse_mat_fixed(&target, 3, false),
+        I32F32::from_num(0),
+    );
+    let input: Vec<f32> = vec![0., 0., 0., 0., 0., 0., 0., 0., 0.];
+    let mut mat = vec_to_sparse_mat_fixed(&input, 3, false);
+    let mask: Vec<bool> = vec![false, false, false];
+    let target: Vec<f32> = vec![0., 0., 0., 0., 0., 0., 0., 0., 0.];
+    inplace_mask_rows_sparse(&mask, &mut mat);
+    assert_sparse_mat_compare(
+        &mat,
+        &vec_to_sparse_mat_fixed(&target, 3, false),
+        I32F32::from_num(0),
+    );
+}
+
 #[test]
 fn test_math_inplace_mask_diag() {
     let vector: Vec<f32> = vec![1., 2., 3., 4., 5., 6., 7., 8., 9.];
