Mirror of https://github.com/advplyr/audiobookshelf.git (synced 2026-02-06 10:19:39 +00:00)
Fix: Include Watcher as lib with no dependencies and fix tiny-readdir bug #610
This commit is contained in:
parent 160dac109d
commit ec6e70725c

34 changed files with 2187 additions and 281 deletions
server/libs/watcher/atomically/utils/attemptify.js (new file, 25 lines)
@@ -0,0 +1,25 @@
"use strict";
/* IMPORT */
Object.defineProperty(exports, "__esModule", { value: true });
exports.attemptifySync = exports.attemptifyAsync = void 0;
const consts_1 = require("../consts");
/* ATTEMPTIFY */
//TODO: Maybe publish this as a standalone package
//FIXME: The type castings here aren't exactly correct
const attemptifyAsync = (fn, onError = consts_1.NOOP) => {
    return function () {
        return fn.apply(undefined, arguments).catch(onError);
    };
};
exports.attemptifyAsync = attemptifyAsync;
const attemptifySync = (fn, onError = consts_1.NOOP) => {
    return function () {
        try {
            return fn.apply(undefined, arguments);
        }
        catch (error) {
            return onError(error);
        }
    };
};
exports.attemptifySync = attemptifySync;
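For orientation (not part of the diff): a minimal sketch of how these wrappers behave, assuming it runs beside the vendored file so the relative require resolves, and that NOOP from ../consts simply ignores the error.

// Illustrative usage sketch only; file paths are placeholders.
const fs = require('fs');
const { promisify } = require('util');
const { attemptifyAsync, attemptifySync } = require('./attemptify');

// With the default NOOP handler, failures are swallowed instead of rejecting/throwing.
const unlinkAttempt = attemptifyAsync(promisify(fs.unlink));
const statSyncAttempt = attemptifySync(fs.statSync);

unlinkAttempt('/no/such/file').then(result => console.log(result)); // resolves (undefined), no rejection
console.log(statSyncAttempt('/no/such/file')); // undefined, no throw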
server/libs/watcher/atomically/utils/fs.js (new file, 42 lines)
@@ -0,0 +1,42 @@
"use strict";
/* IMPORT */
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const util_1 = require("util");
const attemptify_1 = require("./attemptify");
const fs_handlers_1 = require("./fs_handlers");
const retryify_1 = require("./retryify");
/* FS */
const FS = {
    chmodAttempt: attemptify_1.attemptifyAsync(util_1.promisify(fs.chmod), fs_handlers_1.default.onChangeError),
    chownAttempt: attemptify_1.attemptifyAsync(util_1.promisify(fs.chown), fs_handlers_1.default.onChangeError),
    closeAttempt: attemptify_1.attemptifyAsync(util_1.promisify(fs.close)),
    fsyncAttempt: attemptify_1.attemptifyAsync(util_1.promisify(fs.fsync)),
    mkdirAttempt: attemptify_1.attemptifyAsync(util_1.promisify(fs.mkdir)),
    realpathAttempt: attemptify_1.attemptifyAsync(util_1.promisify(fs.realpath)),
    statAttempt: attemptify_1.attemptifyAsync(util_1.promisify(fs.stat)),
    unlinkAttempt: attemptify_1.attemptifyAsync(util_1.promisify(fs.unlink)),
    closeRetry: retryify_1.retryifyAsync(util_1.promisify(fs.close), fs_handlers_1.default.isRetriableError),
    fsyncRetry: retryify_1.retryifyAsync(util_1.promisify(fs.fsync), fs_handlers_1.default.isRetriableError),
    openRetry: retryify_1.retryifyAsync(util_1.promisify(fs.open), fs_handlers_1.default.isRetriableError),
    readFileRetry: retryify_1.retryifyAsync(util_1.promisify(fs.readFile), fs_handlers_1.default.isRetriableError),
    renameRetry: retryify_1.retryifyAsync(util_1.promisify(fs.rename), fs_handlers_1.default.isRetriableError),
    statRetry: retryify_1.retryifyAsync(util_1.promisify(fs.stat), fs_handlers_1.default.isRetriableError),
    writeRetry: retryify_1.retryifyAsync(util_1.promisify(fs.write), fs_handlers_1.default.isRetriableError),
    chmodSyncAttempt: attemptify_1.attemptifySync(fs.chmodSync, fs_handlers_1.default.onChangeError),
    chownSyncAttempt: attemptify_1.attemptifySync(fs.chownSync, fs_handlers_1.default.onChangeError),
    closeSyncAttempt: attemptify_1.attemptifySync(fs.closeSync),
    mkdirSyncAttempt: attemptify_1.attemptifySync(fs.mkdirSync),
    realpathSyncAttempt: attemptify_1.attemptifySync(fs.realpathSync),
    statSyncAttempt: attemptify_1.attemptifySync(fs.statSync),
    unlinkSyncAttempt: attemptify_1.attemptifySync(fs.unlinkSync),
    closeSyncRetry: retryify_1.retryifySync(fs.closeSync, fs_handlers_1.default.isRetriableError),
    fsyncSyncRetry: retryify_1.retryifySync(fs.fsyncSync, fs_handlers_1.default.isRetriableError),
    openSyncRetry: retryify_1.retryifySync(fs.openSync, fs_handlers_1.default.isRetriableError),
    readFileSyncRetry: retryify_1.retryifySync(fs.readFileSync, fs_handlers_1.default.isRetriableError),
    renameSyncRetry: retryify_1.retryifySync(fs.renameSync, fs_handlers_1.default.isRetriableError),
    statSyncRetry: retryify_1.retryifySync(fs.statSync, fs_handlers_1.default.isRetriableError),
    writeSyncRetry: retryify_1.retryifySync(fs.writeSync, fs_handlers_1.default.isRetriableError)
};
/* EXPORT */
exports.default = FS;
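Not part of the diff, but a small sketch of how this FS map is consumed: the "Attempt" variants never throw, while the "Retry" variants are curried over a deadline timestamp (see retryify.js below). The require path and file paths are placeholders.

// Illustrative usage sketch only.
const FS = require('./fs').default;

(async () => {
  // "Attempt" helpers resolve even when the underlying call fails.
  await FS.unlinkAttempt('/tmp/does-not-exist');
  // "Retry" helpers take a deadline first, then the usual fs arguments.
  const fd = await FS.openRetry(Date.now() + 5000)('/tmp/example.txt', 'w');
  await FS.closeAttempt(fd);
})();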
server/libs/watcher/atomically/utils/fs_handlers.js (new file, 28 lines)
@@ -0,0 +1,28 @@
"use strict";
/* IMPORT */
Object.defineProperty(exports, "__esModule", { value: true });
const consts_1 = require("../consts");
/* FS HANDLERS */
const Handlers = {
    isChangeErrorOk: (error) => {
        const { code } = error;
        if (code === 'ENOSYS')
            return true;
        if (!consts_1.IS_USER_ROOT && (code === 'EINVAL' || code === 'EPERM'))
            return true;
        return false;
    },
    isRetriableError: (error) => {
        const { code } = error;
        if (code === 'EMFILE' || code === 'ENFILE' || code === 'EAGAIN' || code === 'EBUSY' || code === 'EACCESS' || code === 'EACCS' || code === 'EPERM')
            return true;
        return false;
    },
    onChangeError: (error) => {
        if (Handlers.isChangeErrorOk(error))
            return;
        throw error;
    }
};
/* EXPORT */
exports.default = Handlers;
server/libs/watcher/atomically/utils/lang.js (new file, 16 lines)
@@ -0,0 +1,16 @@
"use strict";
/* LANG */
Object.defineProperty(exports, "__esModule", { value: true });
const Lang = {
    isFunction: (x) => {
        return typeof x === 'function';
    },
    isString: (x) => {
        return typeof x === 'string';
    },
    isUndefined: (x) => {
        return typeof x === 'undefined';
    }
};
/* EXPORT */
exports.default = Lang;
server/libs/watcher/atomically/utils/retryify.js (new file, 45 lines)
@@ -0,0 +1,45 @@
"use strict";
/* IMPORT */
Object.defineProperty(exports, "__esModule", { value: true });
exports.retryifySync = exports.retryifyAsync = void 0;
const retryify_queue_1 = require("./retryify_queue");
/* RETRYIFY */
const retryifyAsync = (fn, isRetriableError) => {
    return function (timestamp) {
        return function attempt() {
            return retryify_queue_1.default.schedule().then(cleanup => {
                return fn.apply(undefined, arguments).then(result => {
                    cleanup();
                    return result;
                }, error => {
                    cleanup();
                    if (Date.now() >= timestamp)
                        throw error;
                    if (isRetriableError(error)) {
                        const delay = Math.round(100 + (400 * Math.random())), delayPromise = new Promise(resolve => setTimeout(resolve, delay));
                        return delayPromise.then(() => attempt.apply(undefined, arguments));
                    }
                    throw error;
                });
            });
        };
    };
};
exports.retryifyAsync = retryifyAsync;
const retryifySync = (fn, isRetriableError) => {
    return function (timestamp) {
        return function attempt() {
            try {
                return fn.apply(undefined, arguments);
            }
            catch (error) {
                if (Date.now() > timestamp)
                    throw error;
                if (isRetriableError(error))
                    return attempt.apply(undefined, arguments);
                throw error;
            }
        };
    };
};
exports.retryifySync = retryifySync;
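A sketch, not part of the commit, showing the currying contract: the outer call fixes the retry deadline, the inner call carries the real arguments, and only errors accepted by the predicate are retried with the ~100-500 ms backoff above. The require path and the flaky function are illustrative.

// Illustrative usage sketch only.
const { retryifyAsync } = require('./retryify');

let calls = 0;
const flaky = async () => {
  calls += 1;
  if (calls < 3) {
    const error = new Error('busy');
    error.code = 'EBUSY';
    throw error;
  }
  return 'ok';
};

const flakyWithRetries = retryifyAsync(flaky, error => error.code === 'EBUSY');
flakyWithRetries(Date.now() + 2000)().then(console.log); // retries twice, eventually logs "ok"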
server/libs/watcher/atomically/utils/retryify_queue.js (new file, 58 lines)
@@ -0,0 +1,58 @@
"use strict";
/* IMPORT */
Object.defineProperty(exports, "__esModule", { value: true });
const consts_1 = require("../consts");
/* RETRYIFY QUEUE */
const RetryfyQueue = {
    interval: 25,
    intervalId: undefined,
    limit: consts_1.LIMIT_FILES_DESCRIPTORS,
    queueActive: new Set(),
    queueWaiting: new Set(),
    init: () => {
        if (RetryfyQueue.intervalId)
            return;
        RetryfyQueue.intervalId = setInterval(RetryfyQueue.tick, RetryfyQueue.interval);
    },
    reset: () => {
        if (!RetryfyQueue.intervalId)
            return;
        clearInterval(RetryfyQueue.intervalId);
        delete RetryfyQueue.intervalId;
    },
    add: (fn) => {
        RetryfyQueue.queueWaiting.add(fn);
        if (RetryfyQueue.queueActive.size < (RetryfyQueue.limit / 2)) { // Active queue not under pressure, executing immediately
            RetryfyQueue.tick();
        }
        else {
            RetryfyQueue.init();
        }
    },
    remove: (fn) => {
        RetryfyQueue.queueWaiting.delete(fn);
        RetryfyQueue.queueActive.delete(fn);
    },
    schedule: () => {
        return new Promise(resolve => {
            const cleanup = () => RetryfyQueue.remove(resolver);
            const resolver = () => resolve(cleanup);
            RetryfyQueue.add(resolver);
        });
    },
    tick: () => {
        if (RetryfyQueue.queueActive.size >= RetryfyQueue.limit)
            return;
        if (!RetryfyQueue.queueWaiting.size)
            return RetryfyQueue.reset();
        for (const fn of RetryfyQueue.queueWaiting) {
            if (RetryfyQueue.queueActive.size >= RetryfyQueue.limit)
                break;
            RetryfyQueue.queueWaiting.delete(fn);
            RetryfyQueue.queueActive.add(fn);
            fn();
        }
    }
};
/* EXPORT */
exports.default = RetryfyQueue;
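A sketch, not part of the commit, of the schedule/cleanup handshake: schedule() resolves with a cleanup callback that must be invoked when the descriptor-heavy work is done, which is how the queue caps concurrency. The require path is assumed.

// Illustrative usage sketch only.
const RetryfyQueue = require('./retryify_queue').default;
const fs = require('fs').promises;

const readGated = async (filePath) => {
  const cleanup = await RetryfyQueue.schedule(); // waits while too many jobs are already active
  try {
    return await fs.readFile(filePath, 'utf8');
  }
  finally {
    cleanup(); // frees the slot so waiting callers can be ticked in
  }
};

readGated(__filename).then(text => console.log(text.length));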
server/libs/watcher/atomically/utils/scheduler.js (new file, 35 lines)
@@ -0,0 +1,35 @@
"use strict";
/* IMPORT */
Object.defineProperty(exports, "__esModule", { value: true });
/* VARIABLES */
const Queues = {};
/* SCHEDULER */
//TODO: Maybe publish this as a standalone package
const Scheduler = {
    next: (id) => {
        const queue = Queues[id];
        if (!queue)
            return;
        queue.shift();
        const job = queue[0];
        if (job) {
            job(() => Scheduler.next(id));
        }
        else {
            delete Queues[id];
        }
    },
    schedule: (id) => {
        return new Promise(resolve => {
            let queue = Queues[id];
            if (!queue)
                queue = Queues[id] = [];
            queue.push(resolve);
            if (queue.length > 1)
                return;
            resolve(() => Scheduler.next(id));
        });
    }
};
/* EXPORT */
exports.default = Scheduler;
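A sketch, not part of the commit, of how the scheduler serializes jobs that share an id (for example a file path): schedule(id) resolves with a callback that hands the queue to the next job for that id. The require path and target path are placeholders.

// Illustrative usage sketch only.
const Scheduler = require('./scheduler').default;

const runSerialized = async (filePath, work) => {
  const next = await Scheduler.schedule(filePath); // resolves once earlier jobs for filePath are done
  try {
    return await work();
  }
  finally {
    next(); // starts the next job scheduled under the same id
  }
};

runSerialized('/tmp/example.json', async () => console.log('first'));
runSerialized('/tmp/example.json', async () => console.log('second')); // runs only after the first finishes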
server/libs/watcher/atomically/utils/temp.js (new file, 56 lines)
@@ -0,0 +1,56 @@
"use strict";
/* IMPORT */
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const consts_1 = require("../consts");
const fs_1 = require("./fs");
/* TEMP */
//TODO: Maybe publish this as a standalone package
const Temp = {
    store: {},
    create: (filePath) => {
        const randomness = `000000${Math.floor(Math.random() * 16777215).toString(16)}`.slice(-6), // 6 random-enough hex characters
              timestamp = Date.now().toString().slice(-10), // 10 precise timestamp digits
              prefix = 'tmp-', suffix = `.${prefix}${timestamp}${randomness}`, tempPath = `${filePath}${suffix}`;
        return tempPath;
    },
    get: (filePath, creator, purge = true) => {
        const tempPath = Temp.truncate(creator(filePath));
        if (tempPath in Temp.store)
            return Temp.get(filePath, creator, purge); // Collision found, try again
        Temp.store[tempPath] = purge;
        const disposer = () => delete Temp.store[tempPath];
        return [tempPath, disposer];
    },
    purge: (filePath) => {
        if (!Temp.store[filePath])
            return;
        delete Temp.store[filePath];
        fs_1.default.unlinkAttempt(filePath);
    },
    purgeSync: (filePath) => {
        if (!Temp.store[filePath])
            return;
        delete Temp.store[filePath];
        fs_1.default.unlinkSyncAttempt(filePath);
    },
    purgeSyncAll: () => {
        for (const filePath in Temp.store) {
            Temp.purgeSync(filePath);
        }
    },
    truncate: (filePath) => {
        const basename = path.basename(filePath);
        if (basename.length <= consts_1.LIMIT_BASENAME_LENGTH)
            return filePath; //FIXME: Rough and quick attempt at detecting ok lengths
        const truncable = /^(\.?)(.*?)((?:\.[^.]+)?(?:\.tmp-\d{10}[a-f0-9]{6})?)$/.exec(basename);
        if (!truncable)
            return filePath; //FIXME: No truncable part detected, can't really do much without also changing the parent path, which is unsafe, hoping for the best here
        const truncationLength = basename.length - consts_1.LIMIT_BASENAME_LENGTH;
        return `${filePath.slice(0, -basename.length)}${truncable[1]}${truncable[2].slice(0, -truncationLength)}${truncable[3]}`; //FIXME: The truncable part might be shorter than needed here
    }
};
/* INIT */
process.on('exit', Temp.purgeSyncAll); // Ensuring purgeable temp files are purged on exit
/* EXPORT */
exports.default = Temp;
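A sketch, not part of the commit, of reserving a collision-free temp path next to a target file; the require path and target path are placeholders.

// Illustrative usage sketch only.
const Temp = require('./temp').default;

const [tempPath, dispose] = Temp.get('/tmp/metadata.json', Temp.create);
console.log(tempPath); // e.g. /tmp/metadata.json.tmp-<10 digits><6 hex chars>
// Registered with purge = true, so a leftover file at tempPath is unlinked on process exit;
// call dispose() once the temp file has been renamed into place.
dispose();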