chai = require 'chai'
assert = chai.assert
should = chai.should()
ReadStream = require '../lib/read-stream'
consts = require '../lib/consts'
Memdown = require 'memdown-sync'
#LevelDown = require 'leveldown-sync'

FILTER_INCLUDED = consts.FILTER_INCLUDED
FILTER_EXCLUDED = consts.FILTER_EXCLUDED
FILTER_STOPPED = consts.FILTER_STOPPED

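# Helpers: pad numeric keys to fixed-width strings ("00".."99") so they sort lexicographically.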
fillChar = (c, len=2) ->
  result = ''
  len++
  while len -= 1
    result += c
  result

toFixedInt = (value, digits=2) ->
  result = fillChar 0, digits
  (result + value).slice(-digits)

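# Fixture: 100 random string values keyed "00".."99", loaded into a fresh in-memory (memdown-sync) db.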
allData = {}
for k in [0..99]
  allData[toFixedInt(k, 2)] = Math.random().toString()

initTestDB = ->
  db = Memdown()
  #db = LevelDown('tempdb')
  db.open()
  for k, v of allData
    db.put(k, v)
  db

describe "ReadStream", ->
  db = initTestDB()

  it "test readable-stream@1.0.x", ->
    ###
    This is here as an explicit reminder that we're tied to
    readable-stream@1.0.x, so if someone comes along and wants to bump
    the version they'll have to come here and read that we're using
    Streams2 explicitly across Node versions and will probably delay
    Streams3 adoption until Node 0.12 is released, as
    readable-stream@1.1.x causes some problems with downstream modules.
    See: https://github.com/rvagg/node-levelup/issues/216
    ###
    assert (/^~1\.0\.\d+$/).test(require('../package.json').dependencies['readable-stream']),
      'using readable-stream@1.0.x'

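  # .create: the factory accepts either a db argument or options.db and attaches an iterator.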
  describe ".create", ->
    it "should create a ReadStream via db argument", ->
      stream = ReadStream(db)
      should.exist stream, "stream"
      stream.should.be.instanceOf ReadStream
      should.exist stream._iterator, "iterator should exist"
    it "should create a ReadStream via options.db", ->
      stream = ReadStream(null, {db: db})
      should.exist stream
      stream.should.be.instanceOf ReadStream
      should.exist stream._iterator, "iterator should exist"
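  # .read: full scans, limit, key ranges (gt/lte), key matching, filter callbacks and next/last paging.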
  describe ".read", ->
    it "should read all data from the database", (done)->
      data = {}
      stream = ReadStream(db, {keyAsBuffer: false, ValueAsBuffer: false})
      stream.on "data", (item)->
        data[item.key] = item.value
      stream.on "error", (err)->
        done(err)
      stream.on "end", ()->
        assert.deepEqual data, allData
        done()
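    # limit caps the number of records the stream emits.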
    it "should limit the count", (done)->
      total = 2
      count = 0
      stream = ReadStream db,
        limit: total
        keyAsBuffer: false
        ValueAsBuffer: false
      stream.on "data", (item)->
        count++
      .on "error", (err)->
        done(err)
      .on "end", ()->
        assert.equal count, total
        done()
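    # gt/lte select a half-open key range: keys strictly greater than "03" and up to "60" inclusive.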
    it "should read keys greater than 3 and less than or equal to 60", (done)->
      count = 0
      stream = ReadStream db,
        gt: "03"
        lte: "60"
        keyAsBuffer: false
        ValueAsBuffer: false
      stream.on "data", (item)->
        item.key.should.be.gt(3).and.lte(60)
        count++
      .on "error", (err)->
        done(err)
      .on "end", ()->
        assert.equal count, 60 - 3
        done()
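    # match filters keys by a wildcard pattern; "0*" selects the ten keys "00".."09".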
    it "should match data by key pattern", (done)->
      count = 0
      stream = ReadStream db,
        match: "0*"
        keyAsBuffer: false
        ValueAsBuffer: false
      stream.on "data", (item)->
        item.key.should.be.gte(0).and.lte(9)
        count++
      .on "error", (err)->
        done(err)
      .on "end", ()->
        assert.equal count, 10
        done()
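    # A filter callback can drop records (FILTER_EXCLUDED) or stop the stream (FILTER_STOPPED):
    # even keys are excluded here, and the stream stops once eleven odd keys have been seen.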
    it "should filter data via a filter function", (done)->
      count = 0
      data = {}
      stream = ReadStream db,
        keyAsBuffer: false
        ValueAsBuffer: false
        filter: (k, v)->
          if k % 2 is 0
            return FILTER_EXCLUDED
          count++
          return FILTER_STOPPED if count > 10
      stream.on "data", (item)->
        data[item.key] = item.value
      stream.on "error", (err)->
        done(err)
      stream.on "end", ()->
        keys = Object.keys(data)
        assert.equal keys.length, count
        assert.equal count, 11
        for k, v of data
          assert.ok k % 2 is 1, "key should be odd"
          assert.equal v, allData[k]
        done()
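    # next/last pagination: the "last" event reports the last key emitted; passing it back as the
    # "next" option resumes the scan just after that key, here in pages of two until the data runs out.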
    it "should page through data via next/last", (done)->
      count = 0
      lastKey = null
      pageCount = 0
      nextPage = (db, aLastKey, aPageSize, cb)->
        pageData = []
        ReadStream db,
          next: aLastKey
          limit: aPageSize
          keyAsBuffer: false
          ValueAsBuffer: false
        .on "last", (aLastKey)->
          lastKey = aLastKey
        .on "data", (item)->
          item.key.should.be.gt(aLastKey) if aLastKey
          pageData.push item
        .on "error", (err)->
          done(err)
        .on "end", ()->
          pageCount++
          assert.equal pageData.length, aPageSize if pageCount < 50
          cb(pageData) if cb
      runNext = ->
        if lastKey and pageCount <= 50
          nextPage db, lastKey, 2, (data)->
            lastId = (pageCount - 1) * 2 + 1
            lastKey.should.be.equal toFixedInt(lastId, 2) if lastKey
            if data.length
              data.should.be.deep.equal [
                {key: toFixedInt(lastId - 1), value: allData[toFixedInt(lastId - 1)]}
                {key: toFixedInt(lastId), value: allData[toFixedInt(lastId)]}
              ]
            else
              pageCount.should.be.equal 51
              should.not.exist lastKey
            runNext()
        else
          pageCount.should.be.equal 51
          should.not.exist lastKey
          done()
      # Kick off the first page; runNext() recursively fetches the rest.
      nextPage db, lastKey, 2, (data)->
        lastId = (pageCount - 1) * 2 + 1
        #console.log "p=", pageCount, toFixedInt(lastId, 2), lastKey
        lastKey.should.be.equal toFixedInt(lastId, 2)
        data.should.be.deep.equal [
          {key: toFixedInt(lastId - 1), value: allData[toFixedInt(lastId - 1)]}
          {key: toFixedInt(lastId), value: allData[toFixedInt(lastId)]}
        ]
        runNext()