@@ -681,41 +681,41 @@ fn leftSubtreeLen(input_len: usize) usize {
681681 return @intCast (roundDownToPowerOf2 (full_chunks ) * chunk_length );
682682}
683683
/// Work item describing a contiguous range of full input chunks to be
/// hashed by one worker thread. The resulting chaining values are written
/// into `cvs` at indices [start_chunk, end_chunk).
const ChunkBatch = struct {
    input: []const u8,
    start_chunk: usize,
    end_chunk: usize,
    cvs: [][8]u32,
    key: [8]u32,
    flags: Flags,

    /// Hash chunks [start_chunk, end_chunk) in groups of up to
    /// `max_simd_degree`, storing one chaining value per chunk into `ctx.cvs`.
    fn process(ctx: ChunkBatch) void {
        // Scratch buffer for the serialized CVs produced by one SIMD batch.
        var scratch: [max_simd_degree * Blake3.digest_length]u8 = undefined;
        var next = ctx.start_chunk;

        while (next < ctx.end_chunk) {
            // Take as many chunks as the SIMD width allows this round.
            const count = @min(ctx.end_chunk - next, max_simd_degree);
            const byte_offset = next * chunk_length;
            const byte_len = count * chunk_length;

            const produced = compressChunksParallel(
                ctx.input[byte_offset..][0..byte_len],
                ctx.key,
                next,
                ctx.flags,
                &scratch,
            );

            // Convert each serialized CV back into word form at its slot.
            var i: usize = 0;
            while (i < produced) : (i += 1) {
                const bytes = scratch[i * Blake3.digest_length ..][0..Blake3.digest_length];
                ctx.cvs[next + i] = loadCvWords(bytes.*);
            }

            next += count;
        }
    }
};
719719
720720const ParentBatchContext = struct {
721721 input_cvs : [][8 ]u32 ,
@@ -982,7 +982,7 @@ pub const Blake3 = struct {
982982 const start_chunk = worker_id * chunks_per_worker ;
983983 if (start_chunk >= num_full_chunks ) break ;
984984
985- pool .spawnWg (& wait_group , processChunkBatch , .{ChunkBatchContext {
985+ pool .spawnWg (& wait_group , ChunkBatch . process , .{ChunkBatch {
986986 .input = b ,
987987 .start_chunk = start_chunk ,
988988 .end_chunk = @min (start_chunk + chunks_per_worker , num_full_chunks ),
0 commit comments