-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathstream_test.go
More file actions
135 lines (110 loc) · 2.98 KB
/
stream_test.go
File metadata and controls
135 lines (110 loc) · 2.98 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
package ssdeep
import (
"bytes"
"io"
"strings"
"testing"
"github.com/stretchr/testify/require"
)
// TestStreamReaderMemoryCache verifies that a streamReader over a payload
// smaller than defaultCachedSize keeps the cached bytes in memory (no spill
// file is created) and can replay them exactly after Reset.
func TestStreamReaderMemoryCache(t *testing.T) {
	data := []byte("Hello, this is a small test string")
	reader := strings.NewReader(string(data))
	sr := newStreamReader(reader, defaultCachedSize, true)
	defer sr.Close()
	// Drain the source into the reader's cache.
	err := sr.ReadAll()
	require.NoError(t, err)
	require.Equal(t, int64(len(data)), sr.Size())
	// Small payloads must stay in memory: no temp file should be allocated.
	require.Nil(t, sr.file, "Should use memory for small data")
	// Rewind and read back; the replayed bytes must match the input.
	err = sr.Reset()
	require.NoError(t, err)
	result, err := io.ReadAll(sr)
	require.NoError(t, err)
	require.Equal(t, data, result)
}
// TestStreamReaderFileCache verifies that a streamReader over a payload
// larger than its cache threshold spills to a temporary file and that the
// file-backed cache replays the exact input after Reset.
func TestStreamReaderFileCache(t *testing.T) {
	// Create data larger than minCachedSize so the reader must spill to disk.
	dataSize := int(minCachedSize) + 1024
	data := make([]byte, dataSize)
	for i := range data {
		data[i] = byte(i % 256)
	}
	reader := bytes.NewReader(data)
	sr := newStreamReader(reader, minCachedSize, true)
	defer sr.Close()
	// Drain the source into the reader's cache.
	err := sr.ReadAll()
	require.NoError(t, err)
	require.Equal(t, int64(dataSize), sr.Size())
	// Oversized payloads must be backed by a temp file, not memory.
	require.NotNil(t, sr.file, "Should use file for large data")
	// Rewind and read back; the replayed bytes must match the input.
	err = sr.Reset()
	require.NoError(t, err)
	result, err := io.ReadAll(sr)
	require.NoError(t, err)
	require.Equal(t, data, result)
}
// TestStreamHashWithMemoryCache checks that hashing via Stream yields the
// same digest as hashing the equivalent byte slice with Bytes, for a payload
// small enough to be cached in memory.
func TestStreamHashWithMemoryCache(t *testing.T) {
	input := []byte("The quick brown fox jumps over the lazy dog")
	// Hash through the streaming path.
	streamed, err := Stream(bytes.NewReader(input))
	require.NoError(t, err)
	// Hash the same bytes directly; both paths must agree.
	direct, err := Bytes(input)
	require.NoError(t, err)
	require.Equal(t, direct, streamed)
}
// TestStreamHashWithFileCache checks that Stream and Bytes agree on a payload
// large enough to exceed the default cache, exercising the file-backed path.
func TestStreamHashWithFileCache(t *testing.T) {
	// Build a payload bigger than the default cache so Stream spills to disk.
	size := int(defaultCachedSize) + 1024*1024
	input := make([]byte, size)
	for i := 0; i < size; i++ {
		input[i] = byte(i % 256)
	}
	// Hash through the streaming path.
	streamed, err := Stream(bytes.NewReader(input))
	require.NoError(t, err)
	// Hash the same bytes directly; both paths must agree.
	direct, err := Bytes(input)
	require.NoError(t, err)
	require.Equal(t, direct, streamed)
}
// TestStreamWithCustomCacheSize checks that Stream with a caller-supplied
// cache size (smaller than the payload, forcing file usage) still produces
// the same digest as Bytes.
func TestStreamWithCustomCacheSize(t *testing.T) {
	const payloadSize = 256 * 1024 // 256KB
	input := make([]byte, payloadSize)
	for i := 0; i < payloadSize; i++ {
		input[i] = byte(i % 256)
	}
	// A 128KB cache is half the payload, so the stream must spill to a file.
	streamed, err := Stream(bytes.NewReader(input), WithCachedSize(128*1024))
	require.NoError(t, err)
	// The file-backed path must agree with direct hashing.
	direct, err := Bytes(input)
	require.NoError(t, err)
	require.Equal(t, direct, streamed)
}
// BenchmarkStreamMemoryCache measures Stream on a 64KB payload that fits in
// the default in-memory cache.
func BenchmarkStreamMemoryCache(b *testing.B) {
	data := make([]byte, 64*1024) // 64KB
	for i := range data {
		data[i] = byte(i % 256)
	}
	b.SetBytes(int64(len(data))) // report throughput (MB/s) alongside ns/op
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		reader := bytes.NewReader(data)
		// A silently failing Stream would make the timings meaningless.
		if _, err := Stream(reader); err != nil {
			b.Fatal(err)
		}
	}
}
// BenchmarkStreamFileCache measures Stream on an 8MB payload, which exceeds
// the default cache and exercises the file-backed path.
func BenchmarkStreamFileCache(b *testing.B) {
	data := make([]byte, 8*1024*1024) // 8MB
	for i := range data {
		data[i] = byte(i % 256)
	}
	b.SetBytes(int64(len(data))) // report throughput (MB/s) alongside ns/op
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		reader := bytes.NewReader(data)
		// A silently failing Stream would make the timings meaningless.
		if _, err := Stream(reader); err != nil {
			b.Fatal(err)
		}
	}
}