mirror of
https://github.com/cool-team-official/cool-admin-midway.git
synced 2024-11-01 22:20:30 +08:00
修复默认缓存 过期时间问题
This commit is contained in:
parent
614738f6b1
commit
399bfb783c
@ -4,6 +4,7 @@
|
|||||||
"description": "一个项目用COOL就够了",
|
"description": "一个项目用COOL就够了",
|
||||||
"private": true,
|
"private": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@cool-midway/cache-manager-fs-hash": "^6.0.0",
|
||||||
"@cool-midway/cloud": "^6.0.0",
|
"@cool-midway/cloud": "^6.0.0",
|
||||||
"@cool-midway/core": "^6.0.6",
|
"@cool-midway/core": "^6.0.6",
|
||||||
"@cool-midway/file": "^6.0.1",
|
"@cool-midway/file": "^6.0.1",
|
||||||
@ -26,7 +27,6 @@
|
|||||||
"@midwayjs/validate": "^3.11.5",
|
"@midwayjs/validate": "^3.11.5",
|
||||||
"@midwayjs/view-ejs": "^3.11.5",
|
"@midwayjs/view-ejs": "^3.11.5",
|
||||||
"axios": "^1.3.6",
|
"axios": "^1.3.6",
|
||||||
"cache-manager-fs-hash": "^1.0.0",
|
|
||||||
"ipip-ipdb": "^0.6.0",
|
"ipip-ipdb": "^0.6.0",
|
||||||
"jsonwebtoken": "^9.0.0",
|
"jsonwebtoken": "^9.0.0",
|
||||||
"lodash": "^4.17.21",
|
"lodash": "^4.17.21",
|
||||||
|
21
packages/other/cache-manager-fs-hash/LICENSE
Normal file
21
packages/other/cache-manager-fs-hash/LICENSE
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2017 Roland Starke
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
81
packages/other/cache-manager-fs-hash/README.md
Normal file
81
packages/other/cache-manager-fs-hash/README.md
Normal file
@ -0,0 +1,81 @@
|
|||||||
|
# Node Cache Manager store for Filesystem
|
||||||
|
|
||||||
|
[![Build Status](https://travis-ci.org/rolandstarke/node-cache-manager-fs-hash.svg?branch=master)](https://travis-ci.org/rolandstarke/node-cache-manager-fs-hash)
|
||||||
|
[![dependencies Status](https://david-dm.org/rolandstarke/node-cache-manager-fs-hash/status.svg)](https://david-dm.org/rolandstarke/node-cache-manager-fs-hash)
|
||||||
|
[![npm package](https://img.shields.io/npm/v/cache-manager-fs-hash.svg)](https://www.npmjs.com/package/cache-manager-fs-hash)
|
||||||
|
[![node](https://img.shields.io/node/v/cache-manager-fs-hash.svg)](https://nodejs.org)
|
||||||
|
|
||||||
|
A Filesystem store for the [node-cache-manager](https://github.com/BryanDonovan/node-cache-manager) module
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install cache-manager-fs-hash --save
|
||||||
|
```
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
* Saves anything that is `JSON.stringify`-able to disk
|
||||||
|
* Buffers are saved as well (if they reach a certain size they will be stored to separate files)
|
||||||
|
* Works well with the cluster module
|
||||||
|
|
||||||
|
## Usage example
|
||||||
|
|
||||||
|
Here is an example that demonstrates how to implement the Filesystem cache store.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const cacheManager = require('cache-manager');
|
||||||
|
const fsStore = require('cache-manager-fs-hash');
|
||||||
|
|
||||||
|
const diskCache = cacheManager.caching({
|
||||||
|
store: fsStore,
|
||||||
|
options: {
|
||||||
|
path: 'diskcache', //path for cached files
|
||||||
|
        ttl: 60 * 60, //time to live in seconds
|
||||||
|
subdirs: true, //create subdirectories to reduce the
|
||||||
|
//files in a single dir (default: false)
|
||||||
|
zip: true, //zip files to save diskspace (default: false)
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
(async () => {
|
||||||
|
|
||||||
|
await diskCache.set('key', 'value');
|
||||||
|
console.log(await diskCache.get('key')); //"value"
|
||||||
|
console.log(await diskCache.ttl('key')); //3600 seconds
|
||||||
|
await diskCache.del('key');
|
||||||
|
console.log(await diskCache.get('key')); //undefined
|
||||||
|
|
||||||
|
|
||||||
|
console.log(await getUserCached(5)); //{id: 5, name: '...'}
|
||||||
|
console.log(await getUserCached(5)); //{id: 5, name: '...'}
|
||||||
|
|
||||||
|
await diskCache.reset();
|
||||||
|
|
||||||
|
function getUserCached(userId) {
|
||||||
|
return diskCache.wrap(userId /* cache key */, function () {
|
||||||
|
return getUser(userId);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getUser(userId) {
|
||||||
|
return {id: userId, name: '...'};
|
||||||
|
}
|
||||||
|
|
||||||
|
})();
|
||||||
|
```
|
||||||
|
|
||||||
|
## How it works
|
||||||
|
|
||||||
|
The filename is determined by the md5 hash of the `key`. (The `key` is also saved in the file to detect hash collisions. In this case it will just return a cache miss). Writing is performed with .lock files so that multiple instances of the library (e.g. using the cluster module) do not interfere with one another.
|
||||||
|
|
||||||
|
## Tests
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm test
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
cache-manager-fs-hash is licensed under the MIT license.
|
1
packages/other/cache-manager-fs-hash/index.js
Normal file
1
packages/other/cache-manager-fs-hash/index.js
Normal file
@ -0,0 +1 @@
|
|||||||
|
// package entry point: re-export the implementation from ./src
module.exports = require('./src');
|
38
packages/other/cache-manager-fs-hash/package.json
Normal file
38
packages/other/cache-manager-fs-hash/package.json
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
{
|
||||||
|
"name": "@cool-midway/cache-manager-fs-hash",
|
||||||
|
"version": "6.0.0",
|
||||||
|
"main": "index.js",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8.0.0"
|
||||||
|
},
|
||||||
|
"description": "file system store for node cache manager",
|
||||||
|
"author": "Roland Starke",
|
||||||
|
"license": "MIT",
|
||||||
|
"files": [
|
||||||
|
"index.js",
|
||||||
|
"src/*"
|
||||||
|
],
|
||||||
|
"keywords": [
|
||||||
|
"cache-manager",
|
||||||
|
"storage",
|
||||||
|
"filesystem"
|
||||||
|
],
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/rolandstarke/node-cache-manager-fs-hash.git"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/rolandstarke/node-cache-manager-fs-hash/issues"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "mocha ./test/**/*.js"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"cache-manager": "^3.2.1",
|
||||||
|
"mocha": "^7.1.1",
|
||||||
|
"rimraf": "^3.0.2"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"lockfile": "^1.0.4"
|
||||||
|
}
|
||||||
|
}
|
261
packages/other/cache-manager-fs-hash/src/index.js
Normal file
261
packages/other/cache-manager-fs-hash/src/index.js
Normal file
@ -0,0 +1,261 @@
|
|||||||
|
const fs = require('fs');
|
||||||
|
const crypto = require('crypto');
|
||||||
|
const path = require('path');
|
||||||
|
const promisify = require('util').promisify;
|
||||||
|
const lockFile = require('lockfile');
|
||||||
|
const jsonFileStore = require('./json-file-store');
|
||||||
|
const wrapCallback = require('./wrap-callback');
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* construction of the disk storage
|
||||||
|
* @param {object} [args] options of disk store
|
||||||
|
* @param {string} [args.path] path for cached files
|
||||||
|
* @param {number} [args.ttl] time to life in seconds
|
||||||
|
* @param {boolean} [args.zip] zip content to save diskspace
|
||||||
|
* @todo {number} [args.maxsize] max size in bytes on disk
|
||||||
|
* @param {boolean} [args.subdirs] create subdirectories
|
||||||
|
* @returns {DiskStore}
|
||||||
|
*/
|
||||||
|
exports.create = function (args) {
|
||||||
|
return new DiskStore(args && args.options ? args.options : args);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
 * disk-backed cache store: each entry is persisted as a json (+ optional
 * .bin) file below `options.path`
 * @param {object} [options] see exports.create for the accepted fields
 * @constructor
 */
function DiskStore(options) {
    const opts = options || {};

    this.options = {
        path: opts.path || './cache',       /* path for cached files */
        ttl: opts.ttl,                      /* time before expiring in seconds */
        maxsize: opts.maxsize || Infinity,  /* max size in bytes on disk */
        subdirs: opts.subdirs || false,
        zip: opts.zip || false,
        // check lock at 0ms 50ms 100ms ... 400ms 1400ms 1450ms ... up to
        // 10 seconds, after that just assume the lock is stale
        lockFile: {
            wait: 400,
            pollPeriod: 50,
            stale: 10 * 1000,
            retries: 10,
            retryWait: 600,
        }
    };

    // check storage directory for existence (or create it)
    if (!fs.existsSync(this.options.path)) {
        fs.mkdirSync(this.options.path);
    }
}
|
||||||
|
|
||||||
|
/**
 * save an entry in store
 * @param {string} key
 * @param {*} val
 * @param {object} [options]
 * @param {number} options.ttl time to live in seconds
 * @param {function} [cb]
 * @returns {Promise}
 */
DiskStore.prototype.set = wrapCallback(async function (key, val, options) {
    key = key + '';
    const filePath = this._getFilePathByKey(key);

    // a per-call ttl wins over the store default; undefined means "never expire"
    const ttl = (options && (options.ttl >= 0)) ? +options.ttl : this.options.ttl;
    const data = { key, val };
    if (ttl > 0) {
        // only persist an expire time when a positive ttl was given
        data.expireTime = Date.now() + ttl * 1000;
    }

    if (this.options.subdirs) {
        // lazily create the subdirectory this file lives in
        const dir = path.dirname(filePath);
        await promisify(fs.access)(dir, fs.constants.W_OK).catch(function () {
            return promisify(fs.mkdir)(dir).catch((err) => {
                if (err.code !== 'EEXIST') throw err;
            });
        });
    }

    try {
        await this._lock(filePath);
        await jsonFileStore.write(filePath, data, this.options);
    } finally {
        await this._unlock(filePath);
    }
});
|
||||||
|
|
||||||
|
|
||||||
|
/**
 * read the stored record for `key` from disk
 * @param {string} key
 * @returns {Promise<object|undefined>} the raw {key, val, expireTime?} record,
 *     or undefined on miss, expiry, or md5 hash collision
 * @private
 */
DiskStore.prototype._readFile = async function (key) {
    key = key + '';
    const filePath = this._getFilePathByKey(key);

    try {
        const data = await jsonFileStore.read(filePath, this.options).catch(async (err) => {
            if (err.code === 'ENOENT') {
                throw err;
            }
            // maybe the file is currently written to, lets lock it and read again
            try {
                await this._lock(filePath);
                return await jsonFileStore.read(filePath, this.options);
            } finally {
                await this._unlock(filePath);
            }
        });

        if (data.expireTime <= Date.now()) {
            // cache expired — delete in the background and report a miss
            this.del(key).catch(() => 0 /* ignore */);
            return undefined;
        }
        if (data.key !== key) {
            // hash collision: this file belongs to another key
            return undefined;
        }
        return data;
    } catch (err) {
        // file does not exist lets return a cache miss
        if (err.code === 'ENOENT') {
            return undefined;
        }
        throw err;
    }
};
|
||||||
|
|
||||||
|
/**
 * get an entry from store
 * @param {string} key
 * @param {function} [cb]
 * @returns {Promise}
 */
DiskStore.prototype.get = wrapCallback(async function (key) {
    const data = await this._readFile(key);
    // on a miss _readFile yields undefined, which is passed through as-is
    return data ? data.val : data;
});
|
||||||
|
|
||||||
|
/**
 * get ttl in seconds for key in store
 * @param {string} key
 * @param {function} [cb]
 * @returns {Promise<number>} remaining seconds; 0 on a cache miss; Infinity
 *     for entries that were stored without an expire time
 */
DiskStore.prototype.ttl = wrapCallback(async function (key) {
    const data = await this._readFile(key);
    if (!data) {
        return 0;
    }
    if (data.expireTime === undefined) {
        // set() omits expireTime when no positive ttl was given, i.e. the
        // entry never expires — report Infinity instead of NaN
        return Infinity;
    }
    return (data.expireTime - Date.now()) / 1000;
});
|
||||||
|
|
||||||
|
|
||||||
|
/**
 * delete entry from cache
 * @param {string} key
 * @param {function} [cb]
 * @returns {Promise}
 */
DiskStore.prototype.del = wrapCallback(async function (key) {
    const filePath = this._getFilePathByKey(key);
    try {
        if (this.options.subdirs) {
            // probe the subdirectory first so missing keys fail fast
            await promisify(fs.access)(path.dirname(filePath), fs.constants.W_OK);
        }

        await this._lock(filePath);
        await jsonFileStore.delete(filePath, this.options);
    } catch (err) {
        // deleting a non existing key is not an error
        if (err.code !== 'ENOENT') {
            throw err;
        }
    } finally {
        await this._unlock(filePath);
    }
});
|
||||||
|
|
||||||
|
|
||||||
|
/**
 * cleanup cache on disk -> delete all files from the cache
 * @param {function} [cb]
 * @returns {Promise}
 */
DiskStore.prototype.reset = wrapCallback(async function () {
    const readdir = promisify(fs.readdir);
    const stat = promisify(fs.stat);
    const unlink = promisify(fs.unlink);

    // walk at most two levels deep: path/ and path/diskstore-xxx/
    return await deletePath(this.options.path, 2);

    async function deletePath(fileOrDir, maxDeep) {
        if (maxDeep < 0) {
            return;
        }
        const stats = await stat(fileOrDir);
        if (stats.isDirectory()) {
            for (const entry of await readdir(fileOrDir)) {
                await deletePath(path.join(fileOrDir, entry), maxDeep - 1);
            }
        } else if (stats.isFile() && /[/\\]diskstore-[0-9a-fA-F/\\]+(\.json|-\d\.bin)/.test(fileOrDir)) {
            // only delete files that look like ours
            await unlink(fileOrDir);
        }
    }
});
|
||||||
|
|
||||||
|
|
||||||
|
/**
 * locks a file so other forks that want to use the same file have to wait
 * @param {string} filePath
 * @returns {Promise}
 * @private
 */
DiskStore.prototype._lock = function (filePath) {
    // lockfile mutates the options it receives, so hand it a throw-away copy
    const lockOptions = JSON.parse(JSON.stringify(this.options.lockFile));
    return promisify(lockFile.lock)(filePath + '.lock', lockOptions);
};
|
||||||
|
|
||||||
|
/**
 * unlocks a file path
 * @type {Function}
 * @param {string} filePath
 * @returns {Promise}
 * @private
 */
DiskStore.prototype._unlock = function (filePath) {
    const unlock = promisify(lockFile.unlock);
    return unlock(filePath + '.lock');
};
|
||||||
|
|
||||||
|
/**
 * returns the location where the value should be stored
 * @param {string} key
 * @returns {string}
 * @private
 */
DiskStore.prototype._getFilePathByKey = function (key) {
    const hash = crypto.createHash('md5').update(key + '').digest('hex');
    if (!this.options.subdirs) {
        return path.join(this.options.path, 'diskstore-' + hash);
    }
    // bucket by the first 3 hash chars to keep single directories small
    return path.join(
        this.options.path,
        'diskstore-' + hash.slice(0, 3),
        hash.slice(3),
    );
};
|
118
packages/other/cache-manager-fs-hash/src/json-file-store.js
Normal file
118
packages/other/cache-manager-fs-hash/src/json-file-store.js
Normal file
@ -0,0 +1,118 @@
|
|||||||
|
const promisify = require('util').promisify;
|
||||||
|
const fs = require('fs');
|
||||||
|
const zlib = require('zlib');
|
||||||
|
|
||||||
|
exports.write = async function (path, data, options) {
|
||||||
|
const externalBuffers = [];
|
||||||
|
let dataString = JSON.stringify(data, function replacerFunction(k, value) {
|
||||||
|
//Buffers searilize to {data: [...], type: "Buffer"}
|
||||||
|
if (value && value.type === 'Buffer' && value.data && value.data.length >= 1024 /* only save bigger Buffers external, small ones can be inlined */) {
|
||||||
|
const buffer = Buffer.from(value.data);
|
||||||
|
externalBuffers.push({
|
||||||
|
index: externalBuffers.length,
|
||||||
|
buffer: buffer,
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
type: 'ExternalBuffer',
|
||||||
|
index: externalBuffers.length - 1,
|
||||||
|
size: buffer.length,
|
||||||
|
};
|
||||||
|
} else if (value === Infinity || value === -Infinity) {
|
||||||
|
return { type: 'Infinity', sign: Math.sign(value) };
|
||||||
|
} else {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
let zipExtension = '';
|
||||||
|
if (options.zip) {
|
||||||
|
zipExtension = '.gz';
|
||||||
|
dataString = await promisify(zlib.deflate)(dataString);
|
||||||
|
}
|
||||||
|
//save main json file
|
||||||
|
await promisify(fs.writeFile)(path + '.json' + zipExtension, dataString, 'utf8');
|
||||||
|
|
||||||
|
//save external buffers
|
||||||
|
await Promise.all(externalBuffers.map(async function (externalBuffer) {
|
||||||
|
let buffer = externalBuffer.buffer;
|
||||||
|
if (options.zip) {
|
||||||
|
buffer = await promisify(zlib.deflate)(buffer);
|
||||||
|
}
|
||||||
|
await promisify(fs.writeFile)(path + '-' + externalBuffer.index + '.bin' + zipExtension, buffer, 'utf8');
|
||||||
|
}));
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
exports.read = async function (path, options) {
|
||||||
|
let zipExtension = '';
|
||||||
|
if (options.zip) {
|
||||||
|
zipExtension = '.gz';
|
||||||
|
}
|
||||||
|
|
||||||
|
//read main json file
|
||||||
|
let dataString;
|
||||||
|
if (options.zip) {
|
||||||
|
const compressedData = await promisify(fs.readFile)(path + '.json' + zipExtension);
|
||||||
|
dataString = (await promisify(zlib.unzip)(compressedData)).toString();
|
||||||
|
} else {
|
||||||
|
dataString = await promisify(fs.readFile)(path + '.json' + zipExtension, 'utf8');
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
const externalBuffers = [];
|
||||||
|
const data = JSON.parse(dataString, function bufferReceiver(k, value) {
|
||||||
|
if (value && value.type === 'Buffer' && value.data) {
|
||||||
|
return Buffer.from(value.data);
|
||||||
|
} else if (value && value.type === 'ExternalBuffer' && typeof value.index === 'number' && typeof value.size === 'number') {
|
||||||
|
//JSON.parse is sync so we need to return a buffer sync, we will fill the buffer later
|
||||||
|
const buffer = Buffer.alloc(value.size);
|
||||||
|
externalBuffers.push({
|
||||||
|
index: +value.index,
|
||||||
|
buffer: buffer,
|
||||||
|
});
|
||||||
|
return buffer;
|
||||||
|
} else if (value && value.type === 'Infinity' && typeof value.sign === 'number') {
|
||||||
|
return Infinity * value.sign;
|
||||||
|
} else {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
//read external buffers
|
||||||
|
await Promise.all(externalBuffers.map(async function (externalBuffer) {
|
||||||
|
|
||||||
|
if (options.zip) {
|
||||||
|
const bufferCompressed = await promisify(fs.readFile)(path + '-' + +externalBuffer.index + '.bin' + zipExtension);
|
||||||
|
const buffer = await promisify(zlib.unzip)(bufferCompressed);
|
||||||
|
buffer.copy(externalBuffer.buffer);
|
||||||
|
} else {
|
||||||
|
const fd = await promisify(fs.open)(path + '-' + +externalBuffer.index + '.bin' + zipExtension, 'r');
|
||||||
|
await promisify(fs.read)(fd, externalBuffer.buffer, 0, externalBuffer.buffer.length, 0);
|
||||||
|
await promisify(fs.close)(fd);
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
return data;
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.delete = async function (path, options) {
|
||||||
|
let zipExtension = '';
|
||||||
|
if (options.zip) {
|
||||||
|
zipExtension = '.gz';
|
||||||
|
}
|
||||||
|
|
||||||
|
await promisify(fs.unlink)(path + '.json' + zipExtension);
|
||||||
|
|
||||||
|
//delete binary files
|
||||||
|
try {
|
||||||
|
for (let i = 0; i < Infinity; i++) {
|
||||||
|
await promisify(fs.unlink)(path + '-' + i + '.bin' + zipExtension);
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
if (err.code === 'ENOENT') {
|
||||||
|
// every binary is deleted, we are done
|
||||||
|
} else {
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
21
packages/other/cache-manager-fs-hash/src/wrap-callback.js
Normal file
21
packages/other/cache-manager-fs-hash/src/wrap-callback.js
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
/**
|
||||||
|
* adds an callback param to the original function
|
||||||
|
* @param {function} fn
|
||||||
|
* @returns {function}
|
||||||
|
*/
|
||||||
|
module.exports = function wrapCallback(fn) {
|
||||||
|
return function (...args) {
|
||||||
|
let cb;
|
||||||
|
if (typeof args[args.length - 1] === 'function') {
|
||||||
|
cb = args.pop();
|
||||||
|
}
|
||||||
|
|
||||||
|
const promise = fn.apply(this, args);
|
||||||
|
|
||||||
|
if (typeof cb === 'function') {
|
||||||
|
promise.then(value => setImmediate(cb, null, value), err => setImmediate(cb, err));
|
||||||
|
}
|
||||||
|
|
||||||
|
return promise;
|
||||||
|
};
|
||||||
|
};
|
Loading…
Reference in New Issue
Block a user