@@ -186,17 +186,11 @@ internal void DecompressAllTags(ReadOnlySpan<byte> inputSpan)
             unchecked
             {
                 ref byte input = ref Unsafe.AsRef(in inputSpan[0]);
-
-                // The reference Snappy implementation uses inputEnd as a pointer one byte past the end of the buffer.
-                // However, this is not safe when using ref locals. The ref must point to somewhere within the array
-                // so that GC can adjust the ref if the memory is moved.
-                ref byte inputEnd = ref Unsafe.Add(ref input, inputSpan.Length - 1);
+                ref byte inputEnd = ref Unsafe.Add(ref input, inputSpan.Length);
 
                 // Track the point in the input before which input is guaranteed to have at least Constants.MaxTagLength bytes left
-                ref byte inputLimitMinMaxTagLength = ref Unsafe.Subtract(ref inputEnd, Math.Min(inputSpan.Length, Constants.MaximumTagLength - 1) - 1);
+                ref byte inputLimitMinMaxTagLength = ref Unsafe.Subtract(ref inputEnd, Math.Min(inputSpan.Length, Constants.MaximumTagLength - 1));
 
-                // We always allocate buffer with at least one extra byte on the end, so bufferEnd doesn't have the same
-                // restrictions as inputEnd.
                 ref byte buffer = ref _lookbackBuffer.Span[0];
                 ref byte bufferEnd = ref Unsafe.Add(ref buffer, _lookbackBuffer.Length);
                 ref byte op = ref Unsafe.Add(ref buffer, _lookbackPosition);
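The hunk above switches inputEnd from a reference to the last byte to a reference one byte past the end, which removes the "+ 1"/"- 1" adjustments scattered through the method. A minimal standalone sketch of the arithmetic under that convention (the type and method names below are illustrative, not part of Snappier):

using System;
using System.Runtime.CompilerServices;

internal static class EndRefSketch
{
    // With "end" pointing one past the last byte, the unread byte count is simply end - pos.
    // Under the old convention (end = last byte) the same question needed a trailing "+ 1".
    private static nint BytesRemaining(ref byte pos, ref byte end) =>
        Unsafe.ByteOffset(ref pos, ref end);

    public static void Main()
    {
        byte[] data = new byte[16];
        ref byte start = ref data[0];
        ref byte end = ref Unsafe.Add(ref start, data.Length); // one past the end, as in the diff

        ref byte cursor = ref Unsafe.Add(ref start, 10);
        Console.WriteLine(BytesRemaining(ref cursor, ref end)); // 6
    }
}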
@@ -239,9 +233,9 @@ internal void DecompressAllTags(ReadOnlySpan<byte> inputSpan)
                     {
                         // Data has been moved to the scratch buffer
                         input = ref scratch;
-                        inputEnd = ref Unsafe.Add(ref input, newScratchLength - 1);
+                        inputEnd = ref Unsafe.Add(ref input, newScratchLength);
                         inputLimitMinMaxTagLength = ref Unsafe.Subtract(ref inputEnd,
-                            Math.Min(newScratchLength, Constants.MaximumTagLength - 1) - 1);
+                            Math.Min(newScratchLength, Constants.MaximumTagLength - 1));
                     }
                 }
 
@@ -256,7 +250,7 @@ internal void DecompressAllTags(ReadOnlySpan<byte> inputSpan)
                 {
                     nint literalLength = unchecked((c >> 2) + 1);
 
-                    if (TryFastAppend(ref op, ref bufferEnd, in input, Unsafe.ByteOffset(ref input, ref inputEnd) + 1, literalLength))
+                    if (TryFastAppend(ref op, ref bufferEnd, in input, Unsafe.ByteOffset(ref input, ref inputEnd), literalLength))
                     {
                         Debug.Assert(literalLength < 61);
                         op = ref Unsafe.Add(ref op, literalLength);
@@ -280,7 +274,7 @@ internal void DecompressAllTags(ReadOnlySpan<byte> inputSpan)
                         input = ref Unsafe.Add(ref input, literalLengthLength);
                     }
 
-                    nint inputRemaining = Unsafe.ByteOffset(ref input, ref inputEnd) + 1;
+                    nint inputRemaining = Unsafe.ByteOffset(ref input, ref inputEnd);
                     if (inputRemaining < literalLength)
                     {
                         Append(ref op, ref bufferEnd, in input, inputRemaining);
@@ -306,9 +300,9 @@ internal void DecompressAllTags(ReadOnlySpan<byte> inputSpan)
                         {
                             // Data has been moved to the scratch buffer
                             input = ref scratch;
-                            inputEnd = ref Unsafe.Add(ref input, newScratchLength - 1);
+                            inputEnd = ref Unsafe.Add(ref input, newScratchLength);
                             inputLimitMinMaxTagLength = ref Unsafe.Subtract(ref inputEnd,
-                                Math.Min(newScratchLength, Constants.MaximumTagLength - 1) - 1);
+                                Math.Min(newScratchLength, Constants.MaximumTagLength - 1));
 
                         }
                     }
@@ -367,9 +361,9 @@ internal void DecompressAllTags(ReadOnlySpan<byte> inputSpan)
                         {
                             // Data has been moved to the scratch buffer
                             input = ref scratch;
-                            inputEnd = ref Unsafe.Add(ref input, newScratchLength - 1);
+                            inputEnd = ref Unsafe.Add(ref input, newScratchLength);
                             inputLimitMinMaxTagLength = ref Unsafe.Subtract(ref inputEnd,
-                                Math.Min(newScratchLength, Constants.MaximumTagLength - 1) - 1);
+                                Math.Min(newScratchLength, Constants.MaximumTagLength - 1));
                         }
                     }
 
@@ -415,7 +409,7 @@ internal void DecompressAllTags(ReadOnlySpan<byte> inputSpan)
                         (int)literalLengthLength) + 1;
                     }
 
-                    nint inputRemaining = Unsafe.ByteOffset(ref input, ref inputEnd) + 1;
+                    nint inputRemaining = Unsafe.ByteOffset(ref input, ref inputEnd);
                     if (inputRemaining < literalLength)
                     {
                         Append(ref op, ref bufferEnd, in input, inputRemaining);
@@ -468,7 +462,7 @@ private uint RefillTagFromScratch(ref byte input, ref byte inputEnd, ref byte sc
         {
             Debug.Assert(_scratchLength > 0);
 
-            if (Unsafe.IsAddressGreaterThan(ref input, ref inputEnd))
+            if (!Unsafe.IsAddressLessThan(ref input, ref inputEnd))
             {
                 return 0;
             }
@@ -477,7 +471,7 @@ private uint RefillTagFromScratch(ref byte input, ref byte inputEnd, ref byte sc
             uint entry = Constants.CharTable[scratch];
             uint needed = (entry >> 11) + 1; // +1 byte for 'c'
 
-            uint toCopy = Math.Min((uint)Unsafe.ByteOffset(ref input, ref inputEnd) + 1, needed - _scratchLength);
+            uint toCopy = Math.Min((uint)Unsafe.ByteOffset(ref input, ref inputEnd), needed - _scratchLength);
             Unsafe.CopyBlockUnaligned(ref Unsafe.Add(ref scratch, _scratchLength), ref input, toCopy);
 
             _scratchLength += toCopy;
@@ -502,7 +496,7 @@ private uint RefillTagFromScratch(ref byte input, ref byte inputEnd, ref byte sc
         // always have some extra bytes on the end so we don't risk buffer overruns.
         private uint RefillTag(ref byte input, ref byte inputEnd, ref byte scratch)
         {
-            if (Unsafe.IsAddressGreaterThan(ref input, ref inputEnd))
+            if (!Unsafe.IsAddressLessThan(ref input, ref inputEnd))
             {
                 return uint.MaxValue;
             }
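With inputEnd now pointing one past the last byte, the end-of-input tests in RefillTagFromScratch and RefillTag change from IsAddressGreaterThan to !IsAddressLessThan, because input == inputEnd now means the input is exhausted. A small standalone sketch of that predicate (the names below are illustrative, not part of Snappier):

using System;
using System.Runtime.CompilerServices;

internal static class ExhaustionCheckSketch
{
    // "end" points one past the last byte, so pos == end means nothing is left to read.
    // IsAddressGreaterThan alone would misclassify that case as "data still available".
    private static bool IsExhausted(ref byte pos, ref byte end) =>
        !Unsafe.IsAddressLessThan(ref pos, ref end);

    public static void Main()
    {
        byte[] data = new byte[4];
        ref byte start = ref data[0];
        ref byte end = ref Unsafe.Add(ref start, data.Length);

        Console.WriteLine(IsExhausted(ref start, ref end));                    // False: 4 bytes left
        Console.WriteLine(IsExhausted(ref Unsafe.Add(ref start, 4), ref end)); // True: fully consumed
    }
}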
@@ -511,7 +505,7 @@ private uint RefillTag(ref byte input, ref byte inputEnd, ref byte scratch)
             uint entry = Constants.CharTable[input];
             uint needed = (entry >> 11) + 1; // +1 byte for 'c'
 
-            uint inputLength = (uint)Unsafe.ByteOffset(ref input, ref inputEnd) + 1;
+            uint inputLength = (uint)Unsafe.ByteOffset(ref input, ref inputEnd);
             if (inputLength < needed)
             {
                 // Data is insufficient, copy to scratch
@@ -555,11 +549,8 @@ private int? ExpectedLength
                     ArrayPool<byte>.Shared.Return(_lookbackBufferArray);
                 }
 
-                // Always pad the lookback buffer with an extra byte that we don't use. This allows a "ref byte" reference past
-                // the end of the perceived buffer that still points within the array. This is a requirement so that GC can recognize
-                // the "ref byte" points within the array and adjust it if the array is moved.
-                _lookbackBufferArray = ArrayPool<byte>.Shared.Rent(value.GetValueOrDefault() + 1);
-                _lookbackBuffer = _lookbackBufferArray.AsMemory(0, _lookbackBufferArray.Length - 1);
+                _lookbackBufferArray = ArrayPool<byte>.Shared.Rent(value.GetValueOrDefault());
+                _lookbackBuffer = _lookbackBufferArray.AsMemory(0, _lookbackBufferArray.Length);
             }
         }
     }
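Because the lookback buffer no longer needs a trailing padding byte, the setter rents exactly the requested size and exposes the whole rented array. Note that ArrayPool<byte>.Shared.Rent may return an array larger than requested; a quick illustration (variable names here are ours, not the library's):

using System;
using System.Buffers;

internal static class LookbackRentSketch
{
    public static void Main()
    {
        // Rent returns an array of at least the requested length, often larger.
        byte[] rented = ArrayPool<byte>.Shared.Rent(1000);
        Memory<byte> lookback = rented.AsMemory(0, rented.Length); // use the whole rented array

        Console.WriteLine(rented.Length >= 1000);            // True
        Console.WriteLine(lookback.Length == rented.Length); // True

        ArrayPool<byte>.Shared.Return(rented);
    }
}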
@@ -595,7 +586,7 @@ private void Append(ReadOnlySpan<byte> input)
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        private void Append(ref byte op, ref byte bufferEnd, in byte input, nint length)
+        private static void Append(ref byte op, ref byte bufferEnd, in byte input, nint length)
         {
             if (length > Unsafe.ByteOffset(ref op, ref bufferEnd))
             {
@@ -606,7 +597,7 @@ private void Append(ref byte op, ref byte bufferEnd, in byte input, nint length)
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        private bool TryFastAppend(ref byte op, ref byte bufferEnd, in byte input, nint available, nint length)
+        private static bool TryFastAppend(ref byte op, ref byte bufferEnd, in byte input, nint available, nint length)
         {
             if (length <= 16 && available >= 16 + Constants.MaximumTagLength &&
                 Unsafe.ByteOffset(ref op, ref bufferEnd) >= (nint)16)
@@ -619,10 +610,13 @@ private bool TryFastAppend(ref byte op, ref byte bufferEnd, in byte input, nint
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        private void AppendFromSelf(ref byte op, ref byte buffer, ref byte bufferEnd, uint copyOffset, nint length)
+        private static void AppendFromSelf(ref byte op, ref byte buffer, ref byte bufferEnd, uint copyOffset, nint length)
         {
-            ref byte source = ref Unsafe.Subtract(ref op, copyOffset);
-            if (!Unsafe.IsAddressLessThan(ref source, ref op) || Unsafe.IsAddressLessThan(ref source, ref buffer))
+            // ToInt64() ensures that this logic works correctly on x86, at a slight perf cost there. This is because nint
+            // is only 32 bits on x86, so casting the uint copyOffset to nint for the comparison can produce a negative
+            // number for some forms of illegal data, which would bypass the exception and allow unsafe memory access.
+            // Performing the comparison as a long ensures we have enough bits to not lose data; on 64-bit platforms this is a no-op.
+            if (copyOffset == 0 || Unsafe.ByteOffset(ref buffer, ref op).ToInt64() < copyOffset)
             {
                 ThrowHelper.ThrowInvalidDataException("Invalid copy offset");
             }
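The rewritten bounds check compares the distance from the start of the buffer to op against copyOffset as a 64-bit value before any backwards reference is formed. A standalone sketch of why the comparison width matters (the helper below is hypothetical, not part of Snappier):

using System;

internal static class CopyOffsetCheckSketch
{
    // Mirrors the shape of the check above: the offset must be non-zero and must not reach
    // before the start of the written data. Comparing as long prevents a uint offset above
    // int.MaxValue from wrapping to a negative value in a 32-bit process.
    private static bool IsValidCopyOffset(long bytesAlreadyWritten, uint copyOffset) =>
        copyOffset != 0 && bytesAlreadyWritten >= copyOffset;

    public static void Main()
    {
        Console.WriteLine(IsValidCopyOffset(1024, 64));        // True: stays inside written data
        Console.WriteLine(IsValidCopyOffset(1024, 0));         // False: zero offset is never legal
        Console.WriteLine(IsValidCopyOffset(10, 0x80000005u)); // False: rejected instead of wrapping
    }
}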
@@ -632,6 +626,7 @@ private void AppendFromSelf(ref byte op, ref byte buffer, ref byte bufferEnd, ui
                 ThrowHelper.ThrowInvalidDataException("Data too long");
             }
 
+            ref byte source = ref Unsafe.Subtract(ref op, copyOffset);
             CopyHelpers.IncrementalCopy(ref source, ref op,
                 ref Unsafe.Add(ref op, length), ref bufferEnd);
         }