@@ -384,9 +384,8 @@ internal static int LastIndexOfAnyVectorized<TNegator, TOptimizations>(ref short
             return -1;
         }

-        internal static int IndexOfAnyVectorized<TNegator, TOptimizations>(ref byte searchSpace, int searchSpaceLength, Vector128<byte> bitmap)
+        internal static int IndexOfAnyVectorized<TNegator>(ref byte searchSpace, int searchSpaceLength, Vector128<byte> bitmap)
             where TNegator : struct, INegator
-            where TOptimizations : struct, IOptimizations
         {
             ref byte currentSearchSpace = ref searchSpace;

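For context on the call shape used throughout this diff: TNegator.NegateIfNeeded(...) is invoked directly on the type parameter, which relies on static abstract interface members. A minimal sketch of that negator pattern follows; the member set and the DontNegate/Negate names are illustrative assumptions, not code taken from this change.

using System.Runtime.Intrinsics;

internal interface INegator
{
    static abstract Vector128<byte> NegateIfNeeded(Vector128<byte> result);
    static abstract Vector256<byte> NegateIfNeeded(Vector256<byte> result);
}

// "IndexOfAny" flavor: the lookup result is already the match mask.
internal readonly struct DontNegate : INegator
{
    public static Vector128<byte> NegateIfNeeded(Vector128<byte> result) => result;
    public static Vector256<byte> NegateIfNeeded(Vector256<byte> result) => result;
}

// "IndexOfAnyExcept" flavor: flip every lane so non-matches become the hits.
internal readonly struct Negate : INegator
{
    public static Vector128<byte> NegateIfNeeded(Vector128<byte> result) => ~result;
    public static Vector256<byte> NegateIfNeeded(Vector256<byte> result) => ~result;
}

// A generic caller picks the behavior at compile time, without virtual dispatch,
// mirroring the call sites in the hunks below:
internal static class NegatorUsage
{
    public static Vector128<byte> Apply<TNegator>(Vector128<byte> lookupResult)
        where TNegator : struct, INegator
        => TNegator.NegateIfNeeded(lookupResult);
}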
@@ -408,7 +407,7 @@ internal static int IndexOfAnyVectorized<TNegator, TOptimizations>(ref byte sear
                     {
                         Vector256<byte> source = Vector256.LoadUnsafe(ref currentSearchSpace);

-                        Vector256<byte> result = IndexOfAnyLookup<TNegator, TOptimizations>(source, bitmap256);
+                        Vector256<byte> result = TNegator.NegateIfNeeded(IndexOfAnyLookupCore(source, bitmap256));
                         if (result != Vector256<byte>.Zero)
                         {
                             return ComputeFirstIndex<byte, TNegator>(ref searchSpace, ref currentSearchSpace, result);
@@ -433,7 +432,7 @@ internal static int IndexOfAnyVectorized<TNegator, TOptimizations>(ref byte sear
                 Vector128<byte> source1 = Vector128.LoadUnsafe(ref halfVectorAwayFromEnd);
                 Vector256<byte> source = Vector256.Create(source0, source1);

-                Vector256<byte> result = IndexOfAnyLookup<TNegator, TOptimizations>(source, bitmap256);
+                Vector256<byte> result = TNegator.NegateIfNeeded(IndexOfAnyLookupCore(source, bitmap256));
                 if (result != Vector256<byte>.Zero)
                 {
                     return ComputeFirstIndexOverlapped<byte, TNegator>(ref searchSpace, ref firstVector, ref halfVectorAwayFromEnd, result);
@@ -454,7 +453,7 @@ internal static int IndexOfAnyVectorized<TNegator, TOptimizations>(ref byte sear
                 {
                     Vector128<byte> source = Vector128.LoadUnsafe(ref currentSearchSpace);

-                    Vector128<byte> result = IndexOfAnyLookup<TNegator, TOptimizations>(source, bitmap);
+                    Vector128<byte> result = TNegator.NegateIfNeeded(IndexOfAnyLookupCore(source, bitmap));
                     if (result != Vector128<byte>.Zero)
                     {
                         return ComputeFirstIndex<byte, TNegator>(ref searchSpace, ref currentSearchSpace, result);
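ComputeFirstIndex itself is outside this diff. Purely as an illustration of the general technique (a hypothetical helper, not the runtime's implementation), a first-match index can be recovered from a Vector128<byte> match mask by extracting one bit per lane, taking the lowest set bit, and adding the distance already walked from the start of the search space:

using System.Numerics;
using System.Runtime.CompilerServices;
using System.Runtime.Intrinsics;

internal static class FirstIndexSketch
{
    // Hypothetical stand-in for a ComputeFirstIndex-style helper.
    // Assumes 'result' has all-bits-set lanes for matches (as Vector128.Equals produces).
    public static int ComputeFirstIndex(ref byte searchSpace, ref byte currentSearchSpace, Vector128<byte> result)
    {
        uint moveMask = Vector128.ExtractMostSignificantBits(result); // one bit per byte lane
        int lane = BitOperations.TrailingZeroCount(moveMask);         // first matching lane
        int offsetOfVector = (int)Unsafe.ByteOffset(ref searchSpace, ref currentSearchSpace);
        return offsetOfVector + lane;
    }
}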
@@ -480,7 +479,7 @@ internal static int IndexOfAnyVectorized<TNegator, TOptimizations>(ref byte sear
                 ulong source1 = Unsafe.ReadUnaligned<ulong>(ref halfVectorAwayFromEnd);
                 Vector128<byte> source = Vector128.Create(source0, source1).AsByte();

-                Vector128<byte> result = IndexOfAnyLookup<TNegator, TOptimizations>(source, bitmap);
+                Vector128<byte> result = TNegator.NegateIfNeeded(IndexOfAnyLookupCore(source, bitmap));
                 if (result != Vector128<byte>.Zero)
                 {
                     return ComputeFirstIndexOverlapped<byte, TNegator>(ref searchSpace, ref firstVector, ref halfVectorAwayFromEnd, result);
@@ -490,9 +489,8 @@ internal static int IndexOfAnyVectorized<TNegator, TOptimizations>(ref byte sear
             return -1;
         }

-        internal static int LastIndexOfAnyVectorized<TNegator, TOptimizations>(ref byte searchSpace, int searchSpaceLength, Vector128<byte> bitmap)
+        internal static int LastIndexOfAnyVectorized<TNegator>(ref byte searchSpace, int searchSpaceLength, Vector128<byte> bitmap)
             where TNegator : struct, INegator
-            where TOptimizations : struct, IOptimizations
         {
             ref byte currentSearchSpace = ref Unsafe.Add(ref searchSpace, searchSpaceLength);

@@ -516,7 +514,7 @@ internal static int LastIndexOfAnyVectorized<TNegator, TOptimizations>(ref byte

                         Vector256<byte> source = Vector256.LoadUnsafe(ref currentSearchSpace);

-                        Vector256<byte> result = IndexOfAnyLookup<TNegator, TOptimizations>(source, bitmap256);
+                        Vector256<byte> result = TNegator.NegateIfNeeded(IndexOfAnyLookupCore(source, bitmap256));
                         if (result != Vector256<byte>.Zero)
                         {
                             return ComputeLastIndex<byte, TNegator>(ref searchSpace, ref currentSearchSpace, result);
@@ -539,7 +537,7 @@ internal static int LastIndexOfAnyVectorized<TNegator, TOptimizations>(ref byte
                 Vector128<byte> source1 = Vector128.LoadUnsafe(ref secondVector);
                 Vector256<byte> source = Vector256.Create(source0, source1);

-                Vector256<byte> result = IndexOfAnyLookup<TNegator, TOptimizations>(source, bitmap256);
+                Vector256<byte> result = TNegator.NegateIfNeeded(IndexOfAnyLookupCore(source, bitmap256));
                 if (result != Vector256<byte>.Zero)
                 {
                     return ComputeLastIndexOverlapped<byte, TNegator>(ref searchSpace, ref secondVector, result);
@@ -562,7 +560,7 @@ internal static int LastIndexOfAnyVectorized<TNegator, TOptimizations>(ref byte

                     Vector128<byte> source = Vector128.LoadUnsafe(ref currentSearchSpace);

-                    Vector128<byte> result = IndexOfAnyLookup<TNegator, TOptimizations>(source, bitmap);
+                    Vector128<byte> result = TNegator.NegateIfNeeded(IndexOfAnyLookupCore(source, bitmap));
                     if (result != Vector128<byte>.Zero)
                     {
                         return ComputeLastIndex<byte, TNegator>(ref searchSpace, ref currentSearchSpace, result);
@@ -586,7 +584,7 @@ internal static int LastIndexOfAnyVectorized<TNegator, TOptimizations>(ref byte
                 ulong source1 = Unsafe.ReadUnaligned<ulong>(ref secondVector);
                 Vector128<byte> source = Vector128.Create(source0, source1).AsByte();

-                Vector128<byte> result = IndexOfAnyLookup<TNegator, TOptimizations>(source, bitmap);
+                Vector128<byte> result = TNegator.NegateIfNeeded(IndexOfAnyLookupCore(source, bitmap));
                 if (result != Vector128<byte>.Zero)
                 {
                     return ComputeLastIndexOverlapped<byte, TNegator>(ref searchSpace, ref secondVector, result);
@@ -840,23 +838,6 @@ private static Vector128<byte> IndexOfAnyLookup<TNegator, TOptimizations>(Vector
             return TNegator.NegateIfNeeded(result);
         }

-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        private static Vector128<byte> IndexOfAnyLookup<TNegator, TOptimizations>(Vector128<byte> source, Vector128<byte> bitmapLookup)
-            where TNegator : struct, INegator
-            where TOptimizations : struct, IOptimizations
-        {
-            Vector128<byte> result = IndexOfAnyLookupCore(source, bitmapLookup);
-
-            // On X86, values above 127 will map to 0. If 0 is present in the needle, we must clear the false positives.
-            if (TOptimizations.NeedleContainsZero)
-            {
-                Vector128<byte> ascii = Vector128.LessThan(source, Vector128.Create((byte)128));
-                result &= ascii;
-            }
-
-            return TNegator.NegateIfNeeded(result);
-        }
-
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static Vector128<byte> IndexOfAnyLookupCore(Vector128<byte> source, Vector128<byte> bitmapLookup)
         {
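The deleted IndexOfAnyLookup wrappers existed only for the case the removed comment describes: on x86 the lookup treats input bytes above 127 as if they were 0, so when the needle contains byte 0 the wrapper cleared those lanes with an ASCII mask. The sketch below is illustrative background only, not part of this change: it shows one instruction detail that makes bytes at or above 128 need care in shuffle-based lookups (Ssse3.Shuffle/PSHUFB zeroes any lane whose control byte has its high bit set) and the LessThan-based masking the removed wrapper applied.

using System;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

internal static class AsciiMaskDemo
{
    public static void Main()
    {
        if (Ssse3.IsSupported)
        {
            Vector128<byte> table = Vector128.Create((byte)0xFF);   // a lookup table of all ones
            Vector128<byte> control = Vector128.Create((byte)0x80); // every control byte has bit 7 set

            // PSHUFB writes zero to any destination byte whose control byte has its high bit set,
            // so this prints an all-zero vector even though the table is all 0xFF.
            Console.WriteLine(Ssse3.Shuffle(table, control));
        }

        // The masking step the removed wrappers used: lanes holding bytes below 128 become
        // all-ones, lanes holding 0x80..0xFF become zero, and ANDing this mask into the
        // lookup result clears the false positives.
        Vector128<byte> source = Vector128.Create(0x41, 0x80, 0x7F, 0xFF, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
        Vector128<byte> ascii = Vector128.LessThan(source, Vector128.Create((byte)128));
        Console.WriteLine(ascii);
    }
}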
@@ -903,23 +884,6 @@ private static Vector256<byte> IndexOfAnyLookup<TNegator, TOptimizations>(Vector
             return TNegator.NegateIfNeeded(result);
         }

-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        private static Vector256<byte> IndexOfAnyLookup<TNegator, TOptimizations>(Vector256<byte> source, Vector256<byte> bitmapLookup)
-            where TNegator : struct, INegator
-            where TOptimizations : struct, IOptimizations
-        {
-            // See comments in IndexOfAnyLookup(Vector128<byte>) above for more details.
-            Vector256<byte> result = IndexOfAnyLookupCore(source, bitmapLookup);
-
-            if (TOptimizations.NeedleContainsZero)
-            {
-                Vector256<byte> ascii = Vector256.LessThan(source, Vector256.Create((byte)128));
-                result &= ascii;
-            }
-
-            return TNegator.NegateIfNeeded(result);
-        }
-
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static Vector256<byte> IndexOfAnyLookupCore(Vector256<byte> source, Vector256<byte> bitmapLookup)
         {