Correctly track capacity when initial buffer is empty
This fixes gh-75.
bos committed Aug 26, 2014
1 parent 1780eaf commit 3c47ecd
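
For context on the change below: attoparsec's Buffer tracks both the number of bytes it currently holds (_len) and the capacity of its underlying allocation (_cap), and appending appears to rely on _len never exceeding _cap for a non-empty buffer. The old empty-buffer case of pappend built a Buffer whose _cap was 0 even though it now held data; the fix delegates to buffer, which (per the commit title) presumably initialises the capacity from the chunk's length. The following standalone sketch is illustrative only — SketchBuffer, fromChunk, buggyAppendToEmpty and capacityConsistent are made-up names, not attoparsec's API — and shows the invariant being broken and restored.

-- Minimal, self-contained sketch of the capacity invariant (not attoparsec's
-- real module; the names below are illustrative only).
import qualified Data.ByteString as B

-- Simplified stand-in for attoparsec's Buffer; the real one carries a
-- ForeignPtr, an offset and a generation counter as well.
data SketchBuffer = SketchBuffer
  { sbBytes :: B.ByteString  -- contents
  , sbLen   :: Int           -- bytes currently in use
  , sbCap   :: Int           -- bytes available in the underlying allocation
  } deriving Show

-- Analogue of building a fresh buffer from an initial chunk: capacity starts
-- out equal to the chunk's length.
fromChunk :: B.ByteString -> SketchBuffer
fromChunk bs = SketchBuffer bs (B.length bs) (B.length bs)

-- What the old empty-buffer case of pappend effectively did: keep the new
-- bytes but report a capacity of zero.
buggyAppendToEmpty :: B.ByteString -> SketchBuffer
buggyAppendToEmpty bs = SketchBuffer bs (B.length bs) 0

-- The invariant that later appends depend on for their space accounting.
capacityConsistent :: SketchBuffer -> Bool
capacityConsistent b = sbLen b <= sbCap b

main :: IO ()
main = do
  let chunk = B.pack [1, 2, 3]
  print (capacityConsistent (fromChunk chunk))          -- True
  print (capacityConsistent (buggyAppendToEmpty chunk)) -- False: len 3, cap 0

With the fix, an append onto an empty buffer goes through buffer (and its Text counterpart), so the capacity field stays consistent with the data actually stored.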
Showing 2 changed files with 5 additions and 3 deletions.
3 changes: 2 additions & 1 deletion Data/Attoparsec/ByteString/Buffer.hs
@@ -65,6 +65,7 @@ import Foreign.Storable (peek, peekByteOff, poke, sizeOf)
 import GHC.ForeignPtr (mallocPlainForeignPtrBytes)
 import Prelude hiding (length)
 
+-- If _cap is zero, this buffer is empty.
 data Buffer = Buf {
       _fp  :: {-# UNPACK #-} !(ForeignPtr Word8)
     , _off :: {-# UNPACK #-} !Int
@@ -96,7 +97,7 @@ instance Monoid Buffer where
     mconcat xs = foldl1' mappend xs
 
 pappend :: Buffer -> ByteString -> Buffer
-pappend (Buf _ _ _ 0 _) (PS fp off len) = Buf fp off len 0 0
+pappend (Buf _ _ _ 0 _) bs = buffer bs
 pappend buf (PS fp off len) = append buf fp off len
 
 append :: Buffer -> ForeignPtr a -> Int -> Int -> Buffer
5 changes: 3 additions & 2 deletions Data/Attoparsec/Text/Buffer.hs
@@ -51,6 +51,7 @@ import GHC.ST (ST(..), runST)
 import Prelude hiding (length)
 import qualified Data.Text.Array as A
 
+-- If _cap is zero, this buffer is empty.
 data Buffer = Buf {
       _arr :: {-# UNPACK #-} !A.Array
     , _off :: {-# UNPACK #-} !Int
@@ -82,8 +83,8 @@ instance Monoid Buffer where
     mconcat xs = foldl1' mappend xs
 
 pappend :: Buffer -> Text -> Buffer
-pappend (Buf _ _ _ 0 _) (Text arr off len) = Buf arr off len 0 0
-pappend buf (Text arr off len) = append buf arr off len
+pappend (Buf _ _ _ 0 _) t = buffer t
+pappend buf (Text arr off len) = append buf arr off len
 
 append :: Buffer -> A.Array -> Int -> Int -> Buffer
 append (Buf arr0 off0 len0 cap0 gen0) !arr1 !off1 !len1 = runST $ do
