@@ -16,7 +16,6 @@ import (
 	"github.com/cockroachdb/errors"
 	"github.com/cockroachdb/pebble/internal/base"
 	"github.com/cockroachdb/pebble/internal/invariants"
-	"github.com/cockroachdb/pebble/internal/manual"
 	"github.com/cockroachdb/pebble/internal/treeprinter"
 	"github.com/cockroachdb/pebble/sstable/block"
 )
@@ -382,7 +381,7 @@ func (i *Iter) readEntry() {
 		ptr = unsafe.Pointer(uintptr(ptr) + 5)
 	}
 	shared += i.transforms.SyntheticPrefixAndSuffix.PrefixLen()
-	unsharedKey := getBytes(ptr, int(unshared))
+	unsharedKey := unsafe.Slice((*byte)(ptr), int(unshared))
 	// TODO(sumeer): move this into the else block below.
 	i.fullKey = append(i.fullKey[:shared], unsharedKey...)
 	if shared == 0 {
@@ -395,7 +394,7 @@ func (i *Iter) readEntry() {
 		i.key = i.fullKey
 	}
 	ptr = unsafe.Pointer(uintptr(ptr) + uintptr(unshared))
-	i.val = getBytes(ptr, int(value))
+	i.val = unsafe.Slice((*byte)(ptr), int(value))
 	i.nextOffset = int32(uintptr(ptr)-uintptr(i.ptr)) + int32(value)
 }
 
@@ -449,7 +448,7 @@ func (i *Iter) readFirstKey() error {
 		ptr = unsafe.Pointer(uintptr(ptr) + 5)
 	}
 
-	firstKey := getBytes(ptr, int(unshared))
+	firstKey := unsafe.Slice((*byte)(ptr), int(unshared))
 	// Manually inlining base.DecodeInternalKey provides a 5-10% speedup on
 	// BlockIter benchmarks.
 	if n := len(firstKey) - 8; n >= 0 {
@@ -611,7 +610,7 @@ func (i *Iter) SeekGE(key []byte, flags base.SeekGEFlags) *base.InternalKV {
 
 		// Manually inlining part of base.DecodeInternalKey provides a 5-10%
 		// speedup on BlockIter benchmarks.
-		s := getBytes(ptr, int(v1))
+		s := unsafe.Slice((*byte)(ptr), int(v1))
 		var k []byte
 		if n := len(s) - 8; n >= 0 {
 			k = s[:n:n]
@@ -793,7 +792,7 @@ func (i *Iter) SeekLT(key []byte, flags base.SeekLTFlags) *base.InternalKV {
 
 		// Manually inlining part of base.DecodeInternalKey provides a 5-10%
 		// speedup on BlockIter benchmarks.
-		s := getBytes(ptr, int(v1))
+		s := unsafe.Slice((*byte)(ptr), int(v1))
 		var k []byte
 		if n := len(s) - 8; n >= 0 {
 			k = s[:n:n]
@@ -1240,7 +1239,7 @@ func (i *Iter) nextPrefixV3(succKey []byte) *base.InternalKV {
 			}
 			// The trailer is written in little endian, so the key kind is the first
 			// byte in the trailer that is encoded in the slice [unshared-8:unshared].
-			keyKind := base.InternalKeyKind((*[manual.MaxArrayLen]byte)(ptr)[unshared-8])
+			keyKind := base.InternalKeyKind(*(*byte)(unsafe.Pointer(uintptr(ptr) + uintptr(unshared) - 8)))
 			keyKind = keyKind & base.InternalKeyKindSSTableInternalObsoleteMask
 			prefixChanged := false
 			if keyKind == base.InternalKeyKindSet {
@@ -1334,7 +1333,7 @@ func (i *Iter) nextPrefixV3(succKey []byte) *base.InternalKV {
 			// - (Unlikely) The prefix has not changed.
 			// We assemble the key etc. under the assumption that it is the likely
 			// case.
-			unsharedKey := getBytes(ptr, int(unshared))
+			unsharedKey := unsafe.Slice((*byte)(ptr), int(unshared))
 			// TODO(sumeer): move this into the else block below. This is a bit tricky
 			// since the current logic assumes we have always copied the latest key
 			// into fullKey, which is why when we get to the next key we can (a)
@@ -1346,7 +1345,7 @@ func (i *Iter) nextPrefixV3(succKey []byte) *base.InternalKV {
 			// too. This same comment applies to the other place where we can do this
 			// optimization, in readEntry().
 			i.fullKey = append(i.fullKey[:shared], unsharedKey...)
-			i.val = getBytes(valuePtr, int(value))
+			i.val = unsafe.Slice((*byte)(valuePtr), int(value))
 			if shared == 0 {
 				// Provide stability for the key across positioning calls if the key
 				// doesn't share a prefix with the previous key. This removes requiring the
@@ -1419,7 +1418,7 @@ start:
 		i.nextOffset = i.offset
 		e := &i.cached[n]
 		i.offset = e.offset
-		i.val = getBytes(unsafe.Pointer(uintptr(i.ptr)+uintptr(e.valStart)), int(e.valSize))
+		i.val = unsafe.Slice((*byte)(unsafe.Pointer(uintptr(i.ptr)+uintptr(e.valStart))), int(e.valSize))
 		// Manually inlined version of i.decodeInternalKey(i.key).
 		i.key = i.cachedBuf[e.keyStart:e.keyEnd]
 		if n := len(i.key) - 8; n >= 0 {
@@ -1713,10 +1712,10 @@ func (i *RawIter) readEntry() {
 	shared, ptr := decodeVarint(ptr)
 	unshared, ptr := decodeVarint(ptr)
 	value, ptr := decodeVarint(ptr)
-	i.key = append(i.key[:shared], getBytes(ptr, int(unshared))...)
+	i.key = append(i.key[:shared], unsafe.Slice((*byte)(ptr), int(unshared))...)
 	i.key = i.key[:len(i.key):len(i.key)]
 	ptr = unsafe.Pointer(uintptr(ptr) + uintptr(unshared))
-	i.val = getBytes(ptr, int(value))
+	i.val = unsafe.Slice((*byte)(ptr), int(value))
 	i.nextOffset = int32(uintptr(ptr)-uintptr(i.ptr)) + int32(value)
 }
 
@@ -1761,7 +1760,7 @@ func (i *RawIter) SeekGE(key []byte) bool {
 		// Decode the key at that restart point, and compare it to the key sought.
 		v1, ptr := decodeVarint(ptr)
 		_, ptr = decodeVarint(ptr)
-		s := getBytes(ptr, int(v1))
+		s := unsafe.Slice((*byte)(ptr), int(v1))
 		return i.cmp(key, s) < 0
 	})
 
@@ -1828,7 +1827,7 @@ func (i *RawIter) Prev() bool {
 		i.nextOffset = i.offset
 		e := &i.cached[n-1]
 		i.offset = e.offset
-		i.val = getBytes(unsafe.Pointer(uintptr(i.ptr)+uintptr(e.valStart)), int(e.valSize))
+		i.val = unsafe.Slice((*byte)(unsafe.Pointer(uintptr(i.ptr)+uintptr(e.valStart))), int(e.valSize))
 		i.ikey.UserKey = i.cachedBuf[e.keyStart:e.keyEnd]
 		i.cached = i.cached[:n]
 		return true
@@ -1939,10 +1938,6 @@ func (i *RawIter) Describe(tp treeprinter.Node, fmtKV DescribeKV) {
 	}
 }
 
-func getBytes(ptr unsafe.Pointer, length int) []byte {
-	return (*[manual.MaxArrayLen]byte)(ptr)[:length:length]
-}
-
 func decodeVarint(ptr unsafe.Pointer) (uint32, unsafe.Pointer) {
 	if a := *((*uint8)(ptr)); a < 128 {
 		return uint32(a),
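Note (not part of the diff): the removed getBytes helper built a []byte by reinterpreting the pointer as a pointer to a very large fixed-size array and slicing it, which is why the internal/manual import (for manual.MaxArrayLen) could also be dropped; the replacement uses unsafe.Slice, available since Go 1.17. Below is a minimal standalone sketch of the two equivalent patterns, using a local buffer and an illustrative maxArrayLen constant rather than block memory or manual.MaxArrayLen.

package main

import (
	"fmt"
	"unsafe"
)

func main() {
	buf := []byte("hello, block")
	ptr := unsafe.Pointer(&buf[0])
	length := 5

	// Old pattern (as removed): reinterpret ptr as a pointer to a huge array
	// and take a length-capped slice of it. maxArrayLen stands in for
	// manual.MaxArrayLen and is only illustrative.
	const maxArrayLen = 1 << 30
	oldSlice := (*[maxArrayLen]byte)(ptr)[:length:length]

	// New pattern (as introduced): unsafe.Slice builds the same slice header
	// directly from the element pointer and length.
	newSlice := unsafe.Slice((*byte)(ptr), length)

	// The keyKind change follows the same idea for a single byte: index into
	// the reinterpreted array vs. dereference ptr+offset directly.
	offset := 4
	oldByte := (*[maxArrayLen]byte)(ptr)[offset]
	newByte := *(*byte)(unsafe.Pointer(uintptr(ptr) + uintptr(offset)))

	fmt.Println(string(oldSlice), string(newSlice)) // hello hello
	fmt.Println(oldByte == newByte)                 // true
}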