Finish implementing version 1.0 and add documentation
Some checks failed
/ deploy (push) Failing after 5s
This commit is contained in:
parent 85e33094cd
commit 895b9976ff
12 changed files with 617 additions and 90 deletions
1 .gitignore (vendored)
@@ -130,3 +130,4 @@ dist
 .yarn/install-state.gz
 .pnp.*
 
+test/temp/-*
185 README.md
@@ -1,3 +1,186 @@
# fs-cache-fast

Cache stored on the file system.

fs-cache-fast is a small, zero-dependency file system cache with both promise-based and sync APIs.

This package is designed to be a smaller, lighter, drop-in replacement for [file-system-cache](https://www.npmjs.com/package/file-system-cache).

# Installation

Install with npm:

```bash
$ npm install --save fs-cache-fast
```

# Getting started

The API is extremely simple:

```javascript
import Cache from 'fs-cache-fast'

let cache = new Cache()

cache.setSync('my-key', { value: 'here' })
let item = cache.getSync('my-key', { fallback: 'here' })

await cache.set('another-key', 'Hi there')
let result = await cache.get('another-key')
```
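
If a key is missing or has expired, the fallback argument is returned rather than an error; a quick sketch, assuming the `cache` instance from the example above:

```javascript
// 'never-stored-key' was never set, so both calls return the fallback value.
let missing = await cache.get('never-stored-key', 'my-default')
// missing === 'my-default'

let missingSync = cache.getSync('never-stored-key', 'my-default')
// missingSync === 'my-default'
```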

# API

### new Cache(options)

Create a new cache in the specified directory (if the directory is omitted, a random one is generated inside the OS temp directory on each run).

Possible values in options:

```javascript
{
  prefix: 'myprefix', // Prefix added to every generated cache filename
  ns: 'myprefix', // Alternative name for prefix, for API compatibility with file-system-cache
  hash_alg: 'sha256', // Hashing algorithm used to generate the cache filename
  cache_dir: '/tmp/MY_CACHE', // Directory where the cache is stored; auto-created if it does not exist
  ttl: 60, // Expiration in seconds for each cache item
}
```

The default options are as follows:

```javascript
{
  prefix: '-',
  hash_alg: 'md5',
  cache_dir: path.join(os.tmpdir(), /* random id */),
  ttl: 0,
}
```
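
As a concrete illustration (the directory path and prefix below are made-up values, not defaults), a cache with a fixed directory and a five-minute default expiry could be created like this:

```javascript
import Cache from 'fs-cache-fast'

let appCache = new Cache({
  cache_dir: '/var/tmp/my-app-cache', // created automatically if it does not exist
  prefix: 'my-app',                   // cache files are named '<prefix>-<hash of key>'
  hash_alg: 'sha256',                 // any algorithm supported by node's crypto module
  ttl: 5 * 60,                        // default expiry in seconds (0 means no expiry)
})
```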

### cache.get(key, fallback = null)

Promise-based: gets the cached value stored under `key`; if it doesn't exist or has expired, the fallback value is returned instead.

```javascript
let myCache = await cache.get('mykey', null)
```

### cache.getSync(key, fallback = null)

Synchronously gets the cached value stored under `key`; if it doesn't exist or has expired, the fallback value is returned instead.

```javascript
let myCache = cache.getSync('mykey', null)
```
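
A typical pattern is to try the cache first and only compute and store on a miss; a minimal sketch using just the documented calls (`fetchUserFromDb` is a hypothetical stand-in for any expensive operation):

```javascript
async function getUser(id) {
  let cached = await cache.get(`user-${id}`, null)
  if (cached !== null) return cached

  let user = await fetchUserFromDb(id)    // hypothetical expensive lookup
  await cache.set(`user-${id}`, user, 60) // keep the result for one minute
  return user
}
```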

### cache.set(key, content, ttl | { ttl: number } = null)

Promise-based: stores the content under the specified key (the third parameter optionally overrides the default ttl set in the constructor).

```javascript
await cache.set('mykey', { hello: 'world' })
```

### cache.setSync(key, content, ttl | { ttl: number } = null)

Synchronously stores the content under the specified key (the third parameter optionally overrides the default ttl set in the constructor).

```javascript
cache.setSync('mykey', { hello: 'world' }, 5 * 60) // Expire this after 5 minutes
```
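
Both forms of the third parameter shown in the signatures above are accepted; the two calls below should be equivalent:

```javascript
// ttl as a plain number of seconds
await cache.set('session', { user: 42 }, 60)

// ttl wrapped in an options object
await cache.set('session', { user: 42 }, { ttl: 60 })
```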

### cache.setMany(items, ttl | { ttl: number } = null)
### cache.save(items, ttl | { ttl: number } = null)

Promise-based: stores multiple items at once, optionally overriding the ttl for these entries.

Items take the form of an array of objects with the following signature: `{ key, content }`

**Note:** for backwards compatibility with `file-system-cache` you can also use the property `value` instead of `content`.

```javascript
await cache.setMany([
  { key: 'one', content: 'Store this' },
  { key: 'two', content: { a: 'and also this' } },
])

await cache.save([
  { key: 'one', value: 'Store this' },
  { key: 'two', value: { a: 'and also this' } },
])
```

### cache.setManySync(items, ttl | { ttl: number } = null)
### cache.saveSync(items, ttl | { ttl: number } = null)

Synchronously stores multiple items at once, optionally overriding the ttl for these entries.

Items take the form of an array of objects with the following signature: `{ key, content }`

**Note:** for backwards compatibility with `file-system-cache` you can also use the property `value` instead of `content`.

**Note:** the alternative name `.saveSync(...)` exists to keep a naming scheme similar to `file-system-cache` (which does not itself provide this functionality).

```javascript
cache.setManySync([
  { key: 'one', content: 'Store this' },
  { key: 'two', content: { a: 'and also this' } },
], 10 * 60) // Expire all of these after 10 minutes.

cache.saveSync([
  { key: 'one', value: 'Store this' },
  { key: 'two', value: { a: 'and also this' } },
], 10 * 60) // Expire all of these after 10 minutes.
```

### cache.remove(key)

Promise-based: removes the cache entry with the specified key.

```javascript
await cache.remove('mykey')
```

### cache.removeSync(key)

Synchronously removes the cache entry with the specified key.

```javascript
cache.removeSync('mykey')
```

### cache.clear()

Promise-based: removes all items in the cache directory that match the configured `prefix` (or `ns`, if you prefer that name).

```javascript
await cache.clear()
```

### cache.clearSync()

Synchronously removes all items in the cache directory that match the configured `prefix` (or `ns`, if you prefer that name).

```javascript
cache.clearSync()
```
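
Because only files that start with the configured prefix are removed, two caches sharing one directory but using different prefixes should not clear each other's entries; a sketch (the directory and prefixes are illustrative):

```javascript
let sessions = new Cache({ cache_dir: '/tmp/shared-cache', prefix: 'sessions' })
let thumbnails = new Cache({ cache_dir: '/tmp/shared-cache', prefix: 'thumbs' })

await sessions.clear() // removes only files whose names start with 'sessions-'
// entries written by `thumbnails` are left untouched
```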

### cache.getAll()

Promise-based: returns all items currently residing in the cache that have a valid ttl.

Returns an array of objects, each with the following signature: `{ key, content, ttl }`

```javascript
let items = await cache.getAll()
// items[0] = { key: 'one', content: 'Store this' }
// items[1] = { key: 'two', content: { a: 'and also this' } }
```

### cache.load()

Promise-based: returns all items currently residing in the cache that have a valid ttl.

This is the API-compatible counterpart for `file-system-cache` and shapes its results slightly differently to maintain compatibility.

Returns an object with a `files` key containing an array of items with this signature: `{ path, value, key }`

```javascript
let items = await cache.load()
// items.files[0] = { path: '...', value: 'Store this', key: 'one' }
// items.files[1] = { path: '...', value: { a: 'and also this' }, key: 'two' }
```
123 index.mjs
@@ -10,10 +10,10 @@ export default class FSCache {
     this.fsPromises = fsPromises || fsPromisesOriginal
 
     this.id = crypto.randomBytes(15).toString('base64').replace(/\//g, '-')
-    this.parse_json = options.parse_json ?? true
-    this.prefix = options.prefix ? options.prefix + '-' : ''
+    this.prefix = options.prefix ? options.prefix + '-' : '-'
     this.hash_alg = options.hash_alg || 'md5'
     this.cache_dir = options.cache_dir || path.join(os.tmpdir(), this.id)
+    this.ttl = options.ttl || 0
 
     // Verify hash algorithm is supported on this system
     crypto.createHash(this.hash_alg)
@@ -21,25 +21,37 @@ export default class FSCache {
     this.fsSync.mkdirSync(this.cache_dir, { recursive: true })
   }
 
-  _parseCacheData(data, overwrite = {}) {
-    return overwrite.parse_json ?? this.parse_json ? JSON.parse(data) : data
+  _checkIsExpired(parsed, now) {
+    return parsed.ttl != null && now > parsed.ttl
   }
 
-  _parseSetData(data, overwrite = {}) {
-    return overwrite.parse_json ?? this.parse_json ? JSON.stringify(data) : data
+  _parseCacheData(data, fallback, overwrite = {}) {
+    let parsed = JSON.parse(data)
+    if (this._checkIsExpired(parsed, new Date().getTime())) {
+      return fallback || null
+    }
+    return parsed.content
+  }
+
+  _parseSetData(key, data, overwrite = {}) {
+    if (!(overwrite.ttl ?? this.ttl)) {
+      return JSON.stringify({ key: key, content: data })
+    }
+    return JSON.stringify({
+      key: key,
+      content: data,
+      ttl: new Date().getTime() + (overwrite.ttl || this.ttl) * 1000,
+    })
   }
 
   hash(name) {
-    return crypto.hash(this.hash_alg, name)
+    return path.join(this.cache_dir, this.prefix + crypto.hash(this.hash_alg, name))
   }
 
   get(name, fallback, opts) {
-    return this.fsPromises.readFile(
-      path.join(this.cache_dir, this.hash(name)),
-      { encoding: 'utf8' }
-    )
+    return this.fsPromises.readFile(this.hash(name), { encoding: 'utf8' })
       .then(
-        data => this._parseCacheData(data, opts),
+        data => this._parseCacheData(data, fallback, opts),
        err => (fallback)
      )
  }
@@ -48,21 +60,20 @@ export default class FSCache {
     let data;
 
     try {
-      data = this.fsSync.readFileSync(
-        path.join(this.cache_dir, this.hash(name)),
-        { encoding: 'utf8' }
-      )
+      data = this.fsSync.readFileSync(this.hash(name), { encoding: 'utf8' })
     } catch {
       return fallback
     }
-    return this._parseCacheData(data, opts)
+    return this._parseCacheData(data, fallback, opts)
   }
 
-  set(name, data, opts = {}) {
+  set(name, data, orgOpts = {}) {
+    let opts = typeof orgOpts === 'number' ? { ttl: orgOpts } : orgOpts
+
     try {
       return this.fsPromises.writeFile(
-        path.join(this.cache_dir, this.hash(name)),
-        this._parseSetData(data, opts),
+        this.hash(name),
+        this._parseSetData(name, data, opts),
         { encoding: opts.encoding || 'utf8' }
       )
     } catch (err) {
@@ -70,11 +81,77 @@ export default class FSCache {
     }
   }
 
-  setSync(name, data, opts = {}) {
+  async setMany(items, options) {
+    for (let item of items) {
+      await this.set(item.key, item.content ?? item.value, options)
+    }
+  }
+
+  save(items, options) {
+    return this.setMany(items, options)
+  }
+
+  setSync(name, data, orgOpts = {}) {
+    let opts = typeof orgOpts === 'number' ? { ttl: orgOpts } : orgOpts
+
     this.fsSync.writeFileSync(
-      path.join(this.cache_dir, this.hash(name)),
-      this._parseSetData(data, opts),
+      this.hash(name),
+      this._parseSetData(name, data, opts),
       { encoding: opts.encoding || 'utf8' }
     )
   }
+
+  setManySync(items, options) {
+    for (let item of items) {
+      this.setSync(item.key, item.content ?? item.value, options)
+    }
+  }
+
+  saveSync(items, options) {
+    return this.setManySync(items, options)
+  }
+
+  remove(name) {
+    return this.fsPromises.rm(this.hash(name), { force: true })
+  }
+
+  removeSync(name) {
+    return this.fsSync.rmSync(this.hash(name), { force: true })
+  }
+
+  async clear() {
+    for (let file of await this.fsPromises.readdir(this.cache_dir)) {
+      if (!file.startsWith(this.prefix)) continue
+      await this.fsPromises.rm(path.join(this.cache_dir, file), { force: true })
+    }
+  }
+
+  clearSync() {
+    for (let file of this.fsSync.readdirSync(this.cache_dir)) {
+      if (!file.startsWith(this.prefix)) continue
+      this.fsSync.rmSync(path.join(this.cache_dir, file), { force: true })
+    }
+  }
+
+  async getAll() {
+    let out = []
+    let now = new Date().getTime()
+    for (let file of await this.fsPromises.readdir(this.cache_dir)) {
+      if (!file.startsWith(this.prefix)) continue
+      let data = await this.fsPromises.readFile(path.join(this.cache_dir, file), { encoding: 'utf8' })
+      let entry = JSON.parse(data)
+      if (entry.content && !this._checkIsExpired(entry, now)) {
+        out.push(entry)
+      }
+    }
+    return out
+  }
+
+  load() {
+    return this.getAll().then(res => {
+      return {
+        files: res.map(entry => ({ path: this.hash(entry.key), value: entry.content, key: entry.key }))
+      }
+    })
+  }
 }
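
For reference, the new `_parseSetData` above wraps every value in a small JSON envelope before it is written to disk; a rough sketch of what `cache.set('mykey', { hello: 'world' }, 60)` ends up storing (field order and the exact timestamp will differ):

```javascript
// Sketch only, derived from _parseSetData in the diff above.
let storedEnvelope = {
  key: 'mykey',
  content: { hello: 'world' },
  ttl: Date.now() + 60 * 1000, // absolute expiry in epoch milliseconds; omitted entirely when no ttl is in effect
}
```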
13 package.json
@@ -9,7 +9,9 @@
   },
   "watch": {
     "test": {
-      "patterns": ["./"],
+      "patterns": [
+        "./"
+      ],
       "extensions": "mjs"
     }
   },
@@ -20,6 +22,11 @@
   "author": "Jonatan Nilsson",
   "license": "WTFPL",
   "devDependencies": {
-    "eltro": "^1.4.5"
-  }
+    "eltro": "^1.5.0"
+  },
+  "files": [
+    "index.mjs",
+    "README.md",
+    "LICENSE"
+  ]
 }
@@ -9,58 +9,90 @@ const __dirname = path.dirname(fileURLToPath(import.meta.url));
 
 let cache = new Cache({ cache_dir: path.join(__dirname, 'temp') })
 
-t.before(async function() {
-  for (let file of await fs.readdir(cache.cache_dir)) {
-    if (file !== '.gitkeep') {
-      await fs.rm(path.join(cache.cache_dir, file))
-    }
-  }
+t.before(function() {
+  return cache.clear()
 })
 
 t.test('get should work', async function() {
   const testKey = 'get-test-one'
+  const testData = { a: 1 }
 
   assert.deepStrictEqual(await cache.get(testKey, 'HELLO'), 'HELLO')
-  await fs.writeFile(path.join(cache.cache_dir, cache.hash(testKey)), JSON.stringify({ a: 1 }))
-  assert.deepStrictEqual(await cache.get(testKey, 'HELLO'), { a: 1 })
-  assert.deepStrictEqual(await cache.get(testKey, 'HELLO', { parse_json: false }), JSON.stringify({ a: 1 }))
+  await fs.writeFile(cache.hash(testKey), cache._parseSetData(testKey, testData))
+  assert.deepStrictEqual(await cache.get(testKey, 'HELLO'), testData)
+})
+
+t.test('get should work with ttl', async function() {
+  const testKey = 'get-test-two'
+  const testData = { a: 1 }
+
+  assert.deepStrictEqual(await cache.get(testKey, 'HELLO'), 'HELLO')
+  await fs.writeFile(cache.hash(testKey), cache._parseSetData(testKey, testData, { ttl: 60 }))
+  assert.deepStrictEqual(await cache.get(testKey, 'HELLO'), testData)
+})
+
+t.test('get should return fallback with expired ttl', async function() {
+  const testKey = 'get-test-three'
+  const testData = { a: 1 }
+
+  assert.deepStrictEqual(await cache.get(testKey, 'HELLO'), 'HELLO')
+  await fs.writeFile(cache.hash(testKey), cache._parseSetData(testKey, testData, { ttl: -1 }))
+  assert.deepStrictEqual(await cache.get(testKey, 'HELLO'), 'HELLO')
 })
 
 t.test('getSync should work', function() {
   const testKey = 'get-sync-test-one'
+  const testData = { b: 2 }
 
   assert.deepStrictEqual(cache.getSync(testKey, 'HELLO'), 'HELLO')
-  fsSync.writeFileSync(path.join(cache.cache_dir, cache.hash(testKey)), JSON.stringify({ b: 2 }))
-  assert.deepStrictEqual(cache.getSync(testKey, 'HELLO'), { b: 2 })
-  assert.deepStrictEqual(cache.getSync(testKey, 'HELLO', { parse_json: false }), JSON.stringify({ b: 2 }))
+  fsSync.writeFileSync(cache.hash(testKey), cache._parseSetData(testKey, testData))
+  assert.deepStrictEqual(cache.getSync(testKey, 'HELLO'), testData)
+})
+
+t.test('getSync should work with ttl', function() {
+  const testKey = 'get-sync-test-two'
+  const testData = { b: 2 }
+
+  assert.deepStrictEqual(cache.getSync(testKey, 'HELLO'), 'HELLO')
+  fsSync.writeFileSync(cache.hash(testKey), cache._parseSetData(testKey, testData, { ttl: 60 }))
+  assert.deepStrictEqual(cache.getSync(testKey, 'HELLO'), testData)
+})
+
+t.test('getSync should return fallback with expired ttl', function() {
+  const testKey = 'get-sync-test-three'
+  const testData = { b: 2 }
+
+  assert.deepStrictEqual(cache.getSync(testKey, 'HELLO'), 'HELLO')
+  fsSync.writeFileSync(cache.hash(testKey), cache._parseSetData(testKey, testData, { ttl: -1 }))
+  assert.deepStrictEqual(cache.getSync(testKey, 'HELLO'), 'HELLO')
 })
 
 t.test('set should work', async function() {
   const testKey = 'set-test-one'
-  const assertPath = path.join(cache.cache_dir, cache.hash(testKey))
+  const assertPath = cache.hash(testKey)
 
   assert.notOk(fsSync.existsSync(assertPath))
   await cache.set(testKey, { c: 3 })
   assert.ok(fsSync.existsSync(assertPath))
   let content = await fs.readFile(assertPath, { encoding: 'utf8' })
-  assert.strictEqual(content, JSON.stringify({ c: 3 }))
+  assert.strictEqual(content, cache._parseSetData(testKey, { c: 3 }))
 })
 
 t.test('set should work', function() {
   const testKey = 'set-sync-test-one'
-  const assertPath = path.join(cache.cache_dir, cache.hash(testKey))
+  const assertPath = cache.hash(testKey)
 
   assert.notOk(fsSync.existsSync(assertPath))
   cache.setSync(testKey, { d: 4 })
   assert.ok(fsSync.existsSync(assertPath))
   let content = fsSync.readFileSync(assertPath, { encoding: 'utf8' })
-  assert.strictEqual(content, JSON.stringify({ d: 4 }))
+  assert.strictEqual(content, cache._parseSetData(testKey, { d: 4 }))
 })
 
 t.test('should all work together', async function() {
   const testKey = 'hello world'
   const assertFallback = 'This is fallback'
-  const assertPath = path.join(cache.cache_dir, cache.hash(testKey))
+  const assertPath = cache.hash(testKey)
 
   assert.notOk(fsSync.existsSync(assertPath))
   assert.strictEqual(await cache.get(testKey, assertFallback), assertFallback)
@@ -35,8 +35,8 @@ t.describe('#constructor()', function() {
   t.test('comes with default options', function() {
     let cache = createCache({})
     assert.ok(cache.id)
-    assert.strictEqual(cache.parse_json, true)
-    assert.strictEqual(cache.prefix, '')
+    assert.strictEqual(cache.ttl, 0)
+    assert.strictEqual(cache.prefix, '-')
     assert.strictEqual(cache.hash_alg, 'md5')
     assert.strictEqual(cache.cache_dir, path.join(os.tmpdir(), cache.id))
   })
@@ -45,20 +45,20 @@ t.describe('#constructor()', function() {
     const assertHash = 'sha256'
     const assertDir = '/something/else'
     const assertPrefix = 'blabla'
-    const assertParseJson = false
+    const assertTtl = 60
 
     let cache = createCache({
       prefix: assertPrefix,
       hash_alg: assertHash,
       cache_dir: assertDir,
-      parse_json: assertParseJson,
+      ttl: assertTtl,
     })
 
     assert.ok(cache.id)
-    assert.strictEqual(cache.parse_json, assertParseJson)
     assert.strictEqual(cache.prefix, assertPrefix + '-')
     assert.strictEqual(cache.hash_alg, assertHash)
     assert.strictEqual(cache.cache_dir, assertDir)
+    assert.strictEqual(cache.ttl, assertTtl)
   })
 
   t.test('should create the directory by default', function() {
@@ -80,56 +80,101 @@ t.describe('FSCache', function() {
   t.describe('#hash()', function() {
     t.test('should use cache hasher to hash string', function() {
       let cache = createCache({ hash_alg: 'sha256' })
-      assert.strictEqual(cache.hash('asdf'), crypto.hash('sha256', 'asdf'))
+      assert.strictEqual(cache.hash('asdf'), path.join(cache.cache_dir, '-' + crypto.hash('sha256', 'asdf')))
 
       cache = createCache({ hash_alg: 'md5' })
-      assert.strictEqual(cache.hash('asdf'), crypto.hash('md5', 'asdf'))
+      assert.strictEqual(cache.hash('asdf'), path.join(cache.cache_dir, '-' + crypto.hash('md5', 'asdf')))
     })
+
+    t.test('should add prefix if prefix is defined', function() {
+      let cache = createCache({ prefix: 'asdfg', hash_alg: 'md5' })
+      assert.strictEqual(cache.hash('asdf'), path.join(cache.cache_dir, 'asdfg-' + crypto.hash('md5', 'asdf')))
+    })
   })
 
   t.describe('#_parseCacheData()', function() {
     t.test('should default parse as json', function() {
       let cache = createCache()
-      let output = cache._parseCacheData('{"hello":"world"}')
+      let output = cache._parseCacheData('{"content":{"hello":"world"}}')
       assert.strictEqual(typeof output, 'object')
       assert.strictEqual(output.hello, 'world')
     })
 
-    t.test('can be overwritten in options', function() {
-      let cache = createCache({ parse_json: false })
-      let output = cache._parseCacheData('{"hello":"world"}')
-      assert.strictEqual(typeof output, 'string')
-      assert.strictEqual(output, '{"hello":"world"}')
+    t.test('should return content if not expired', function() {
+      let expiredTime = new Date().getTime() + 1000
+      let cache = createCache()
+      let output = cache._parseCacheData(`{"content":{"hello":"world"},"ttl":${expiredTime}}`)
+      assert.strictEqual(typeof output, 'object')
+      assert.strictEqual(output.hello, 'world')
     })
 
-    t.test('can be overwritten in parameter', function() {
+    t.test('should return null if ttl expired', function() {
+      let expiredTime = new Date().getTime() - 1
       let cache = createCache()
-      let output = cache._parseCacheData('{"hello":"world"}', { parse_json: false })
-      assert.strictEqual(typeof output, 'string')
-      assert.strictEqual(output, '{"hello":"world"}')
+      let output = cache._parseCacheData(`{"content":{"hello":"world"},"ttl":${expiredTime}}`)
+      assert.strictEqual(output, null)
+    })
+
+    t.test('should fall back to fallback if ttl expired', function() {
+      const fallback = { a: 1 }
+      let cache = createCache()
+      let output = cache._parseCacheData('{"content":{"hello":"world"},"ttl":0}', fallback)
+      assert.strictEqual(output, fallback)
     })
   })
 
   t.describe('#_parseSetData()', function() {
     t.test('should default stringify to json', function() {
+      const assertKey = 'mytestkey-1234'
+
       let cache = createCache()
-      let output = cache._parseSetData({ hello: 'world' })
+      let output = cache._parseSetData(assertKey, { hello: 'world' })
       assert.strictEqual(typeof output, 'string')
-      assert.strictEqual(output, '{"hello":"world"}')
+      assert.strictEqual(output, `{"key":"${assertKey}","content":{"hello":"world"}}`)
     })
 
-    t.test('can be overwritten in options', function() {
-      let cache = createCache({ parse_json: false })
-      let output = cache._parseSetData('Hello world')
-      assert.strictEqual(typeof output, 'string')
-      assert.strictEqual(output, 'Hello world')
+    t.test('should include ttl if specified in options', function() {
+      const assertData = { a: 1 }
+
+      let cache = createCache({ ttl: 60 })
+      let output = cache._parseSetData('a', assertData)
+      let back = JSON.parse(output)
+      assert.ok(back.ttl)
+      assert.deepStrictEqual(back.content, assertData)
+      assert.equalWithMargin(new Date().getTime() + 60 * 1000, back.ttl, 1000)
    })
 
-    t.test('can be overwritten in parameter', function() {
+    t.test('should include ttl if specified in parameters', function() {
+      const assertData = { a: 1 }
+      const assertKey = 'mytestkey-1234'
+
       let cache = createCache()
-      let output = cache._parseSetData('Hello world', { parse_json: false })
-      assert.strictEqual(typeof output, 'string')
-      assert.strictEqual(output, 'Hello world')
+      let output = cache._parseSetData(assertKey, assertData, { ttl: 60 })
+      let back = JSON.parse(output)
+      assert.ok(back.ttl)
+      assert.strictEqual(back.key, assertKey)
+      assert.deepStrictEqual(back.content, assertData)
+      assert.equalWithMargin(new Date().getTime() + 60 * 1000, back.ttl, 1000)
+    })
+
+    t.test('ttl in parameter should overwrite options', function() {
+      const assertData = { a: 1 }
+
+      let cache = createCache({ ttl: 30 })
+      let output = cache._parseSetData('a', assertData, { ttl: 60 })
+      let back = JSON.parse(output)
+      assert.ok(back.ttl)
+      assert.deepStrictEqual(back.content, assertData)
+      assert.equalWithMargin(new Date().getTime() + 60 * 1000, back.ttl, 1000)
+    })
+
+    t.test('ttl in parameter with 0 should overwrite options with no ttl', function() {
+      const assertData = { a: 1 }
+
+      let cache = createCache({ ttl: 30 })
+      let output = cache._parseSetData('a', assertData, { ttl: 0 })
+      let back = JSON.parse(output)
+      assert.notOk(back.ttl)
    })
  })
@@ -145,21 +190,23 @@ t.describe('FSCache', function() {
       let output = await cache.get(assertKey)
       assert.strictEqual(output, assertResult)
       assert.ok(fsPromises.readFile.called)
-      assert.strictEqual(fsPromises.readFile.firstCall[0], path.join(cache.cache_dir, cache.hash(assertKey)))
+      assert.strictEqual(fsPromises.readFile.firstCall[0], cache.hash(assertKey))
       assert.strictEqual(fsPromises.readFile.firstCall[1]?.encoding, 'utf8')
       assert.ok(cache._parseCacheData.called)
       assert.ok(cache._parseCacheData.firstCall[0], assertContent)
     })
 
-    t.test('should pass extra options to the parser', async function() {
+    t.test('should pass extra options and fallback to the parser', async function() {
       const assertOptions = { a: 1 }
+      const assertFallback = { b: 2 }
       let cache = createCache()
       cache._parseCacheData = spy()
 
-      await cache.get('asdf', null, assertOptions)
+      await cache.get('asdf', assertFallback, assertOptions)
 
       assert.ok(cache._parseCacheData.called)
-      assert.ok(cache._parseCacheData.firstCall[1], assertOptions)
+      assert.ok(cache._parseCacheData.firstCall[1], assertFallback)
+      assert.ok(cache._parseCacheData.firstCall[2], assertOptions)
     })
 
     t.test('should support fallback value if file does not exist', async function() {
|
|||
let output = cache.getSync(assertKey)
|
||||
assert.strictEqual(output, assertResult)
|
||||
assert.ok(fsSync.readFileSync.called)
|
||||
assert.strictEqual(fsSync.readFileSync.firstCall[0], path.join(cache.cache_dir, cache.hash(assertKey)))
|
||||
assert.strictEqual(fsSync.readFileSync.firstCall[0], cache.hash(assertKey))
|
||||
assert.strictEqual(fsSync.readFileSync.firstCall[1]?.encoding, 'utf8')
|
||||
assert.ok(cache._parseCacheData.called)
|
||||
assert.ok(cache._parseCacheData.firstCall[0], assertContent)
|
||||
|
@ -204,13 +251,15 @@ t.describe('FSCache', function() {
|
|||
|
||||
t.test('should pass extra options to the parser', function() {
|
||||
const assertOptions = { a: 1 }
|
||||
const assertFallback = { b: 2 }
|
||||
let cache = createCache()
|
||||
cache._parseCacheData = spy()
|
||||
|
||||
cache.getSync('asdf', null, assertOptions)
|
||||
cache.getSync('asdf', assertFallback, assertOptions)
|
||||
|
||||
assert.ok(cache._parseCacheData.called)
|
||||
assert.ok(cache._parseCacheData.firstCall[1], assertOptions)
|
||||
assert.ok(cache._parseCacheData.firstCall[1], assertFallback)
|
||||
assert.ok(cache._parseCacheData.firstCall[2], assertOptions)
|
||||
})
|
||||
|
||||
t.test('should support fallback value if file does not exist', function() {
|
||||
|
@ -246,11 +295,12 @@ t.describe('FSCache', function() {
|
|||
await cache.set(assertKey, assertInput)
|
||||
|
||||
assert.ok(fsPromises.writeFile.called)
|
||||
assert.strictEqual(fsPromises.writeFile.firstCall[0], path.join(cache.cache_dir, cache.hash(assertKey)))
|
||||
assert.strictEqual(fsPromises.writeFile.firstCall[0], cache.hash(assertKey))
|
||||
assert.strictEqual(fsPromises.writeFile.firstCall[1], assertContent)
|
||||
assert.strictEqual(fsPromises.writeFile.firstCall[2]?.encoding, 'utf8')
|
||||
assert.ok(cache._parseSetData.called)
|
||||
assert.ok(cache._parseSetData.firstCall[0], assertInput)
|
||||
assert.strictEqual(cache._parseSetData.firstCall[0], assertKey)
|
||||
assert.strictEqual(cache._parseSetData.firstCall[1], assertInput)
|
||||
})
|
||||
|
||||
t.test('should pass extra options to the parser', async function() {
|
||||
|
@ -261,7 +311,18 @@ t.describe('FSCache', function() {
|
|||
await cache.set('asdf', null, assertOptions)
|
||||
|
||||
assert.ok(cache._parseSetData.called)
|
||||
assert.ok(cache._parseSetData.firstCall[1], assertOptions)
|
||||
assert.strictEqual(cache._parseSetData.firstCall[2], assertOptions)
|
||||
})
|
||||
|
||||
t.test('should pass options if number as ttl to the parser', async function() {
|
||||
const assertTtl = 1234
|
||||
let cache = createCache()
|
||||
cache._parseSetData = spy()
|
||||
|
||||
await cache.set('asdf', null, assertTtl)
|
||||
|
||||
assert.ok(cache._parseSetData.called)
|
||||
assert.strictEqual(cache._parseSetData.firstCall[2].ttl, assertTtl)
|
||||
})
|
||||
|
||||
t.test('should pass extra options to the parser', async function() {
|
||||
|
@ -298,11 +359,12 @@ t.describe('FSCache', function() {
|
|||
cache.setSync(assertKey, assertInput)
|
||||
|
||||
assert.ok(fsSync.writeFileSync.called)
|
||||
assert.strictEqual(fsSync.writeFileSync.firstCall[0], path.join(cache.cache_dir, cache.hash(assertKey)))
|
||||
assert.strictEqual(fsSync.writeFileSync.firstCall[0], cache.hash(assertKey))
|
||||
assert.strictEqual(fsSync.writeFileSync.firstCall[1], assertContent)
|
||||
assert.strictEqual(fsSync.writeFileSync.firstCall[2]?.encoding, 'utf8')
|
||||
assert.ok(cache._parseSetData.called)
|
||||
assert.ok(cache._parseSetData.firstCall[0], assertInput)
|
||||
assert.strictEqual(cache._parseSetData.firstCall[0], assertKey)
|
||||
assert.strictEqual(cache._parseSetData.firstCall[1], assertInput)
|
||||
})
|
||||
|
||||
t.test('should pass extra options to the parser', function() {
|
||||
|
@ -313,7 +375,18 @@ t.describe('FSCache', function() {
|
|||
cache.setSync('asdf', null, assertOptions)
|
||||
|
||||
assert.ok(cache._parseSetData.called)
|
||||
assert.ok(cache._parseSetData.firstCall[1], assertOptions)
|
||||
assert.strictEqual(cache._parseSetData.firstCall[2], assertOptions)
|
||||
})
|
||||
|
||||
t.test('should pass options if number as ttl to the parser', function() {
|
||||
const assertTtl = 1234
|
||||
let cache = createCache()
|
||||
cache._parseSetData = spy()
|
||||
|
||||
cache.setSync('asdf', null, assertTtl)
|
||||
|
||||
assert.ok(cache._parseSetData.called)
|
||||
assert.strictEqual(cache._parseSetData.firstCall[2].ttl, assertTtl)
|
||||
})
|
||||
|
||||
t.test('should pass extra options to the parser', function() {
|
||||
|
@ -336,4 +409,159 @@ t.describe('FSCache', function() {
|
|||
}, assertError)
|
||||
})
|
||||
})
|
||||
|
||||
const setManyFunctions = ['setMany', 'save']
|
||||
|
||||
setManyFunctions.forEach(function (fnName) {
|
||||
t.describe(`#${fnName}()`, function () {
|
||||
t.test('should set many at once', async function() {
|
||||
const assertItem1 = { a: 1 }
|
||||
const assertItem2 = { b: 2 }
|
||||
const assertOptions = { opt: true }
|
||||
|
||||
let cache = createCache()
|
||||
cache.set = spy().resolves()
|
||||
|
||||
await cache[fnName]([
|
||||
{ key: 'item1', value: assertItem1 },
|
||||
{ key: 'item2', content: assertItem2 },
|
||||
], assertOptions)
|
||||
|
||||
assert.strictEqual(cache.set.callCount, 2)
|
||||
assert.strictEqual(cache.set.getCallN(1)[0], 'item1')
|
||||
assert.strictEqual(cache.set.getCallN(1)[1], assertItem1)
|
||||
assert.strictEqual(cache.set.getCallN(1)[2], assertOptions)
|
||||
assert.strictEqual(cache.set.getCallN(2)[0], 'item2')
|
||||
assert.strictEqual(cache.set.getCallN(2)[1], assertItem2)
|
||||
assert.strictEqual(cache.set.getCallN(2)[2], assertOptions)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
const setManySyncFunctions = ['setManySync', 'saveSync']
|
||||
|
||||
setManySyncFunctions.forEach(function (fnName) {
|
||||
t.describe(`#${fnName}()`, function () {
|
||||
t.test('should setSync many at once', function() {
|
||||
const assertItem1 = { a: 1 }
|
||||
const assertItem2 = { b: 2 }
|
||||
const assertOptions = { opt: true }
|
||||
|
||||
let cache = createCache()
|
||||
cache.setSync = spy().resolves()
|
||||
|
||||
cache[fnName]([
|
||||
{ key: 'item1', value: assertItem1 },
|
||||
{ key: 'item2', content: assertItem2 },
|
||||
], assertOptions)
|
||||
|
||||
assert.strictEqual(cache.setSync.callCount, 2)
|
||||
assert.strictEqual(cache.setSync.getCallN(1)[0], 'item1')
|
||||
assert.strictEqual(cache.setSync.getCallN(1)[1], assertItem1)
|
||||
assert.strictEqual(cache.setSync.getCallN(1)[2], assertOptions)
|
||||
assert.strictEqual(cache.setSync.getCallN(2)[0], 'item2')
|
||||
assert.strictEqual(cache.setSync.getCallN(2)[1], assertItem2)
|
||||
assert.strictEqual(cache.setSync.getCallN(2)[2], assertOptions)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
t.describe('#remove()', function() {
|
||||
t.test('it should call promise rm', async function () {
|
||||
const assertKey = 'asdf1234'
|
||||
|
||||
let cache = createCache()
|
||||
await cache.remove(assertKey)
|
||||
|
||||
assert.ok(fsPromises.rm.called)
|
||||
assert.strictEqual(fsPromises.rm.firstCall[0], cache.hash(assertKey))
|
||||
assert.deepStrictEqual(fsPromises.rm.firstCall[1], { force: true })
|
||||
})
|
||||
})
|
||||
|
||||
t.describe('#removeSync()', function() {
|
||||
t.test('it should call sync rm', async function () {
|
||||
const assertKey = 'asdf1234'
|
||||
|
||||
let cache = createCache()
|
||||
await cache.removeSync(assertKey)
|
||||
|
||||
assert.ok(fsSync.rmSync.called)
|
||||
assert.strictEqual(fsSync.rmSync.firstCall[0], cache.hash(assertKey))
|
||||
assert.deepStrictEqual(fsSync.rmSync.firstCall[1], { force: true })
|
||||
})
|
||||
})
|
||||
|
||||
t.describe('#clear()', function() {
|
||||
t.test('it should call promises readdir and rm on every item with same prefix', async function () {
|
||||
let files = ['.gitkeep', '-asdf', '-temp', 'hello.world']
|
||||
fsPromises.readdir.resolves(files)
|
||||
|
||||
let cache = createCache()
|
||||
await cache.clear()
|
||||
|
||||
assert.ok(fsPromises.readdir.called)
|
||||
assert.strictEqual(fsPromises.readdir.firstCall[0], cache.cache_dir)
|
||||
assert.strictEqual(fsPromises.rm.callCount, 2)
|
||||
assert.strictEqual(fsPromises.rm.getCallN(1)[0], path.join(cache.cache_dir, '-asdf'))
|
||||
assert.deepStrictEqual(fsPromises.rm.getCallN(1)[1], { force: true })
|
||||
assert.strictEqual(fsPromises.rm.getCallN(2)[0], path.join(cache.cache_dir, '-temp'))
|
||||
assert.deepStrictEqual(fsPromises.rm.getCallN(2)[1], { force: true })
|
||||
})
|
||||
})
|
||||
|
||||
t.describe('#clearSync()', function() {
|
||||
t.test('it should call sync readdirSync and rmSync on every item with same prefix', function () {
|
||||
let files = ['.gitkeep', '-asdf', '-temp', 'hello.world']
|
||||
fsSync.readdirSync.returns(files)
|
||||
|
||||
let cache = createCache()
|
||||
cache.clearSync()
|
||||
|
||||
assert.ok(fsSync.readdirSync.called)
|
||||
assert.strictEqual(fsSync.readdirSync.firstCall[0], cache.cache_dir)
|
||||
assert.strictEqual(fsSync.rmSync.callCount, 2)
|
||||
assert.strictEqual(fsSync.rmSync.getCallN(1)[0], path.join(cache.cache_dir, '-asdf'))
|
||||
assert.deepStrictEqual(fsSync.rmSync.getCallN(1)[1], { force: true })
|
||||
assert.strictEqual(fsSync.rmSync.getCallN(2)[0], path.join(cache.cache_dir, '-temp'))
|
||||
assert.deepStrictEqual(fsSync.rmSync.getCallN(2)[1], { force: true })
|
||||
})
|
||||
})
|
||||
|
||||
const getManyFunctions = ['getAll', 'load']
|
||||
|
||||
getManyFunctions.forEach(function (fnName) {
|
||||
t.describe(`#${fnName}()`, function () {
|
||||
t.test('should read each file in folder and return results', async function() {
|
||||
let files = ['.gitkeep', '-asdf', '-temp', '-derp', 'hello.world']
|
||||
let expired = new Date().getTime() - 1
|
||||
fsPromises.readdir.resolves(files)
|
||||
fsPromises.readFile.onCallN(1).resolves(JSON.stringify({ key: 'a', content: { a: 1 } }))
|
||||
fsPromises.readFile.onCallN(2).resolves(JSON.stringify({ key: 'b', content: { b: 2 } }))
|
||||
fsPromises.readFile.onCallN(3).resolves(JSON.stringify({ key: 'c', content: { c: 3 }, ttl: expired }))
|
||||
|
||||
let cache = createCache()
|
||||
let data = await cache[fnName]()
|
||||
|
||||
if (fnName === 'getAll') {
|
||||
assert.strictEqual(data.length, 2)
|
||||
assert.strictEqual(data[0].key, 'a')
|
||||
assert.deepStrictEqual(data[0].content, { a: 1 })
|
||||
assert.strictEqual(data[1].key, 'b')
|
||||
assert.deepStrictEqual(data[1].content, { b: 2 })
|
||||
} else {
|
||||
assert.strictEqual(data.files.length, 2)
|
||||
assert.strictEqual(data.files[0].path, cache.hash('a'))
|
||||
assert.strictEqual(data.files[0].key, 'a')
|
||||
assert.deepStrictEqual(data.files[0].value, { a: 1 })
|
||||
assert.strictEqual(data.files[1].path, cache.hash('b'))
|
||||
assert.strictEqual(data.files[1].key, 'b')
|
||||
assert.deepStrictEqual(data.files[1].value, { b: 2 })
|
||||
}
|
||||
assert.strictEqual(fsPromises.readFile.getCallN(1)[0], path.join(cache.cache_dir, '-asdf'))
|
||||
assert.strictEqual(fsPromises.readFile.getCallN(2)[0], path.join(cache.cache_dir, '-temp'))
|
||||
assert.strictEqual(fsPromises.readFile.getCallN(3)[0], path.join(cache.cache_dir, '-derp'))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@@ -5,6 +5,8 @@ export function fakeFsSync() {
     mkdirSync: spy(),
     readFileSync: spy(),
     writeFileSync: spy(),
+    rmSync: spy(),
+    readdirSync: spy(),
   }
 }
@@ -12,6 +14,8 @@ export function fakeFsPromises() {
   return {
     readFile: spy().resolves(),
     writeFile: spy().resolves(),
+    rm: spy().resolves(),
+    readdir: spy().resolves([]),
   }
 }
@@ -1 +0,0 @@
-{"d":4}

@@ -1 +0,0 @@
-{"b":2}

@@ -1 +0,0 @@
-{"a":1}

@@ -1 +0,0 @@
-{"f":6}

@@ -1 +0,0 @@
-{"c":3}