initial commit
10  node_modules/atomically/.editorconfig  generated  vendored  Normal file
@@ -0,0 +1,10 @@
root = true

[*]
charset = utf-8
end_of_line = lf
indent_size = 2
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
15  node_modules/atomically/dist/constants.d.ts  generated  vendored  Normal file
@@ -0,0 +1,15 @@
declare const DEFAULT_ENCODING = "utf8";
declare const DEFAULT_FILE_MODE = 438;
declare const DEFAULT_FOLDER_MODE = 511;
declare const DEFAULT_READ_OPTIONS: {};
declare const DEFAULT_WRITE_OPTIONS: {};
declare const DEFAULT_USER_UID: number;
declare const DEFAULT_USER_GID: number;
declare const DEFAULT_TIMEOUT_ASYNC = 7500;
declare const DEFAULT_TIMEOUT_SYNC = 1000;
declare const IS_POSIX: boolean;
declare const IS_USER_ROOT: boolean;
declare const LIMIT_BASENAME_LENGTH = 128;
declare const LIMIT_FILES_DESCRIPTORS = 10000;
declare const NOOP: () => void;
export { DEFAULT_ENCODING, DEFAULT_FILE_MODE, DEFAULT_FOLDER_MODE, DEFAULT_READ_OPTIONS, DEFAULT_WRITE_OPTIONS, DEFAULT_USER_UID, DEFAULT_USER_GID, DEFAULT_TIMEOUT_ASYNC, DEFAULT_TIMEOUT_SYNC, IS_POSIX, IS_USER_ROOT, LIMIT_BASENAME_LENGTH, LIMIT_FILES_DESCRIPTORS, NOOP };
20  node_modules/atomically/dist/constants.js  generated  vendored  Normal file
@@ -0,0 +1,20 @@
/* IMPORT */
import os from 'node:os';
import process from 'node:process';
/* MAIN */
const DEFAULT_ENCODING = 'utf8';
const DEFAULT_FILE_MODE = 0o666;
const DEFAULT_FOLDER_MODE = 0o777;
const DEFAULT_READ_OPTIONS = {};
const DEFAULT_WRITE_OPTIONS = {};
const DEFAULT_USER_UID = os.userInfo().uid;
const DEFAULT_USER_GID = os.userInfo().gid;
const DEFAULT_TIMEOUT_ASYNC = 7500;
const DEFAULT_TIMEOUT_SYNC = 1000;
const IS_POSIX = !!process.getuid;
const IS_USER_ROOT = process.getuid ? !process.getuid() : false;
const LIMIT_BASENAME_LENGTH = 128; //TODO: Fetch the real limit from the filesystem //TODO: Fetch the whole-path length limit too
const LIMIT_FILES_DESCRIPTORS = 10000; //TODO: Fetch the real limit from the filesystem
const NOOP = () => { };
/* EXPORT */
export { DEFAULT_ENCODING, DEFAULT_FILE_MODE, DEFAULT_FOLDER_MODE, DEFAULT_READ_OPTIONS, DEFAULT_WRITE_OPTIONS, DEFAULT_USER_UID, DEFAULT_USER_GID, DEFAULT_TIMEOUT_ASYNC, DEFAULT_TIMEOUT_SYNC, IS_POSIX, IS_USER_ROOT, LIMIT_BASENAME_LENGTH, LIMIT_FILES_DESCRIPTORS, NOOP };
15  node_modules/atomically/dist/index.d.ts  generated  vendored  Normal file
@@ -0,0 +1,15 @@
/// <reference types="node" />
import type { Callback, Data, Encoding, Path, ReadOptions, WriteOptions } from './types';
declare function readFile(filePath: Path, options: Encoding | ReadOptions & {
    encoding: string;
}): Promise<string>;
declare function readFile(filePath: Path, options?: ReadOptions): Promise<Buffer>;
declare function readFileSync(filePath: Path, options: Encoding | ReadOptions & {
    encoding: string;
}): string;
declare function readFileSync(filePath: Path, options?: ReadOptions): Buffer;
declare function writeFile(filePath: Path, data: Data, callback?: Callback): Promise<void>;
declare function writeFile(filePath: Path, data: Data, options?: Encoding | WriteOptions, callback?: Callback): Promise<void>;
declare function writeFileSync(filePath: Path, data: Data, options?: Encoding | WriteOptions): void;
export { readFile, readFileSync, writeFile, writeFileSync };
export type { Encoding, ReadOptions, WriteOptions };
196  node_modules/atomically/dist/index.js  generated  vendored  Normal file
@@ -0,0 +1,196 @@
/* IMPORT */
import path from 'node:path';
import fs from 'stubborn-fs';
import { DEFAULT_ENCODING, DEFAULT_FILE_MODE, DEFAULT_FOLDER_MODE, DEFAULT_READ_OPTIONS, DEFAULT_WRITE_OPTIONS, DEFAULT_USER_UID, DEFAULT_USER_GID, DEFAULT_TIMEOUT_ASYNC, DEFAULT_TIMEOUT_SYNC, IS_POSIX } from './constants.js';
import { isException, isFunction, isString, isUndefined } from './utils/lang.js';
import Scheduler from './utils/scheduler.js';
import Temp from './utils/temp.js';
function readFile(filePath, options = DEFAULT_READ_OPTIONS) {
    if (isString(options))
        return readFile(filePath, { encoding: options });
    const timeout = Date.now() + ((options.timeout ?? DEFAULT_TIMEOUT_ASYNC) || -1);
    return fs.retry.readFile(timeout)(filePath, options);
}
function readFileSync(filePath, options = DEFAULT_READ_OPTIONS) {
    if (isString(options))
        return readFileSync(filePath, { encoding: options });
    const timeout = Date.now() + ((options.timeout ?? DEFAULT_TIMEOUT_SYNC) || -1);
    return fs.retry.readFileSync(timeout)(filePath, options);
}
function writeFile(filePath, data, options, callback) {
    if (isFunction(options))
        return writeFile(filePath, data, DEFAULT_WRITE_OPTIONS, options);
    const promise = writeFileAsync(filePath, data, options);
    if (callback)
        promise.then(callback, callback);
    return promise;
}
async function writeFileAsync(filePath, data, options = DEFAULT_WRITE_OPTIONS) {
    if (isString(options))
        return writeFileAsync(filePath, data, { encoding: options });
    const timeout = Date.now() + ((options.timeout ?? DEFAULT_TIMEOUT_ASYNC) || -1);
    let schedulerCustomDisposer = null;
    let schedulerDisposer = null;
    let tempDisposer = null;
    let tempPath = null;
    let fd = null;
    try {
        if (options.schedule)
            schedulerCustomDisposer = await options.schedule(filePath);
        schedulerDisposer = await Scheduler.schedule(filePath);
        const filePathReal = await fs.attempt.realpath(filePath);
        const filePathExists = !!filePathReal;
        filePath = filePathReal || filePath;
        [tempPath, tempDisposer] = Temp.get(filePath, options.tmpCreate || Temp.create, !(options.tmpPurge === false));
        const useStatChown = IS_POSIX && isUndefined(options.chown);
        const useStatMode = isUndefined(options.mode);
        if (filePathExists && (useStatChown || useStatMode)) {
            const stats = await fs.attempt.stat(filePath);
            if (stats) {
                options = { ...options };
                if (useStatChown) {
                    options.chown = { uid: stats.uid, gid: stats.gid };
                }
                if (useStatMode) {
                    options.mode = stats.mode;
                }
            }
        }
        if (!filePathExists) {
            const parentPath = path.dirname(filePath);
            await fs.attempt.mkdir(parentPath, {
                mode: DEFAULT_FOLDER_MODE,
                recursive: true
            });
        }
        fd = await fs.retry.open(timeout)(tempPath, 'w', options.mode || DEFAULT_FILE_MODE);
        if (options.tmpCreated) {
            options.tmpCreated(tempPath);
        }
        if (isString(data)) {
            await fs.retry.write(timeout)(fd, data, 0, options.encoding || DEFAULT_ENCODING);
        }
        else if (!isUndefined(data)) {
            await fs.retry.write(timeout)(fd, data, 0, data.length, 0);
        }
        if (options.fsync !== false) {
            if (options.fsyncWait !== false) {
                await fs.retry.fsync(timeout)(fd);
            }
            else {
                fs.attempt.fsync(fd);
            }
        }
        await fs.retry.close(timeout)(fd);
        fd = null;
        if (options.chown && (options.chown.uid !== DEFAULT_USER_UID || options.chown.gid !== DEFAULT_USER_GID)) {
            await fs.attempt.chown(tempPath, options.chown.uid, options.chown.gid);
        }
        if (options.mode && options.mode !== DEFAULT_FILE_MODE) {
            await fs.attempt.chmod(tempPath, options.mode);
        }
        try {
            await fs.retry.rename(timeout)(tempPath, filePath);
        }
        catch (error) {
            if (!isException(error))
                throw error;
            if (error.code !== 'ENAMETOOLONG')
                throw error;
            await fs.retry.rename(timeout)(tempPath, Temp.truncate(filePath));
        }
        tempDisposer();
        tempPath = null;
    }
    finally {
        if (fd)
            await fs.attempt.close(fd);
        if (tempPath)
            Temp.purge(tempPath);
        if (schedulerCustomDisposer)
            schedulerCustomDisposer();
        if (schedulerDisposer)
            schedulerDisposer();
    }
}
function writeFileSync(filePath, data, options = DEFAULT_WRITE_OPTIONS) {
    if (isString(options))
        return writeFileSync(filePath, data, { encoding: options });
    const timeout = Date.now() + ((options.timeout ?? DEFAULT_TIMEOUT_SYNC) || -1);
    let tempDisposer = null;
    let tempPath = null;
    let fd = null;
    try {
        const filePathReal = fs.attempt.realpathSync(filePath);
        const filePathExists = !!filePathReal;
        filePath = filePathReal || filePath;
        [tempPath, tempDisposer] = Temp.get(filePath, options.tmpCreate || Temp.create, !(options.tmpPurge === false));
        const useStatChown = IS_POSIX && isUndefined(options.chown);
        const useStatMode = isUndefined(options.mode);
        if (filePathExists && (useStatChown || useStatMode)) {
            const stats = fs.attempt.statSync(filePath);
            if (stats) {
                options = { ...options };
                if (useStatChown) {
                    options.chown = { uid: stats.uid, gid: stats.gid };
                }
                if (useStatMode) {
                    options.mode = stats.mode;
                }
            }
        }
        if (!filePathExists) {
            const parentPath = path.dirname(filePath);
            fs.attempt.mkdirSync(parentPath, {
                mode: DEFAULT_FOLDER_MODE,
                recursive: true
            });
        }
        fd = fs.retry.openSync(timeout)(tempPath, 'w', options.mode || DEFAULT_FILE_MODE);
        if (options.tmpCreated) {
            options.tmpCreated(tempPath);
        }
        if (isString(data)) {
            fs.retry.writeSync(timeout)(fd, data, 0, options.encoding || DEFAULT_ENCODING);
        }
        else if (!isUndefined(data)) {
            fs.retry.writeSync(timeout)(fd, data, 0, data.length, 0);
        }
        if (options.fsync !== false) {
            if (options.fsyncWait !== false) {
                fs.retry.fsyncSync(timeout)(fd);
            }
            else {
                fs.attempt.fsync(fd);
            }
        }
        fs.retry.closeSync(timeout)(fd);
        fd = null;
        if (options.chown && (options.chown.uid !== DEFAULT_USER_UID || options.chown.gid !== DEFAULT_USER_GID)) {
            fs.attempt.chownSync(tempPath, options.chown.uid, options.chown.gid);
        }
        if (options.mode && options.mode !== DEFAULT_FILE_MODE) {
            fs.attempt.chmodSync(tempPath, options.mode);
        }
        try {
            fs.retry.renameSync(timeout)(tempPath, filePath);
        }
        catch (error) {
            if (!isException(error))
                throw error;
            if (error.code !== 'ENAMETOOLONG')
                throw error;
            fs.retry.renameSync(timeout)(tempPath, Temp.truncate(filePath));
        }
        tempDisposer();
        tempPath = null;
    }
    finally {
        if (fd)
            fs.attempt.closeSync(fd);
        if (tempPath)
            Temp.purge(tempPath);
    }
}
/* EXPORT */
export { readFile, readFileSync, writeFile, writeFileSync };
28  node_modules/atomically/dist/types.d.ts  generated  vendored  Normal file
@@ -0,0 +1,28 @@
/// <reference types="node" />
type Callback = (error: Exception | void) => void;
type Data = Uint8Array | string | undefined;
type Disposer = () => void;
type Encoding = 'ascii' | 'base64' | 'binary' | 'hex' | 'latin1' | 'utf8' | 'utf-8' | 'utf16le' | 'ucs2' | 'ucs-2';
type Exception = NodeJS.ErrnoException;
type Path = string;
type ReadOptions = {
    encoding?: Encoding | null;
    mode?: string | number | false;
    timeout?: number;
};
type WriteOptions = {
    chown?: {
        gid: number;
        uid: number;
    } | false;
    encoding?: Encoding | null;
    fsync?: boolean;
    fsyncWait?: boolean;
    mode?: string | number | false;
    schedule?: (filePath: string) => Promise<Disposer>;
    timeout?: number;
    tmpCreate?: (filePath: string) => string;
    tmpCreated?: (filePath: string) => void;
    tmpPurge?: boolean;
};
export type { Callback, Data, Disposer, Encoding, Exception, Path, ReadOptions, WriteOptions };
2  node_modules/atomically/dist/types.js  generated  vendored  Normal file
@@ -0,0 +1,2 @@
/* MAIN */
export {};
6  node_modules/atomically/dist/utils/lang.d.ts  generated  vendored  Normal file
@@ -0,0 +1,6 @@
/// <reference types="node" />
declare const isException: (value: unknown) => value is NodeJS.ErrnoException;
declare const isFunction: (value: unknown) => value is Function;
declare const isString: (value: unknown) => value is string;
declare const isUndefined: (value: unknown) => value is undefined;
export { isException, isFunction, isString, isUndefined };
16  node_modules/atomically/dist/utils/lang.js  generated  vendored  Normal file
@@ -0,0 +1,16 @@
/* IMPORT */
/* MAIN */
const isException = (value) => {
    return (value instanceof Error) && ('code' in value);
};
const isFunction = (value) => {
    return (typeof value === 'function');
};
const isString = (value) => {
    return (typeof value === 'string');
};
const isUndefined = (value) => {
    return (value === undefined);
};
/* EXPORT */
export { isException, isFunction, isString, isUndefined };
6  node_modules/atomically/dist/utils/scheduler.d.ts  generated  vendored  Normal file
@@ -0,0 +1,6 @@
import type { Disposer } from '../types';
declare const Scheduler: {
    next: (id: string) => void;
    schedule: (id: string) => Promise<Disposer>;
};
export default Scheduler;
34  node_modules/atomically/dist/utils/scheduler.js  generated  vendored  Normal file
@@ -0,0 +1,34 @@
/* IMPORT */
/* HELPERS */
const Queues = {};
/* MAIN */
//TODO: Maybe publish this as a standalone package
const Scheduler = {
    /* API */
    next: (id) => {
        const queue = Queues[id];
        if (!queue)
            return;
        queue.shift();
        const job = queue[0];
        if (job) {
            job(() => Scheduler.next(id));
        }
        else {
            delete Queues[id];
        }
    },
    schedule: (id) => {
        return new Promise(resolve => {
            let queue = Queues[id];
            if (!queue)
                queue = Queues[id] = [];
            queue.push(resolve);
            if (queue.length > 1)
                return;
            resolve(() => Scheduler.next(id));
        });
    }
};
/* EXPORT */
export default Scheduler;
11  node_modules/atomically/dist/utils/temp.d.ts  generated  vendored  Normal file
@@ -0,0 +1,11 @@
import type { Disposer } from '../types';
declare const Temp: {
    store: Record<string, boolean>;
    create: (filePath: string) => string;
    get: (filePath: string, creator: (filePath: string) => string, purge?: boolean) => [string, Disposer];
    purge: (filePath: string) => void;
    purgeSync: (filePath: string) => void;
    purgeSyncAll: () => void;
    truncate: (filePath: string) => string;
};
export default Temp;
59  node_modules/atomically/dist/utils/temp.js  generated  vendored  Normal file
@@ -0,0 +1,59 @@
/* IMPORT */
import path from 'node:path';
import fs from 'stubborn-fs';
import whenExit from 'when-exit';
import { LIMIT_BASENAME_LENGTH } from '../constants.js';
/* MAIN */
//TODO: Maybe publish this as a standalone package
const Temp = {
    /* VARIABLES */
    store: {},
    /* API */
    create: (filePath) => {
        const randomness = `000000${Math.floor(Math.random() * 16777215).toString(16)}`.slice(-6); // 6 random-enough hex characters
        const timestamp = Date.now().toString().slice(-10); // 10 precise timestamp digits
        const prefix = 'tmp-';
        const suffix = `.${prefix}${timestamp}${randomness}`;
        const tempPath = `${filePath}${suffix}`;
        return tempPath;
    },
    get: (filePath, creator, purge = true) => {
        const tempPath = Temp.truncate(creator(filePath));
        if (tempPath in Temp.store)
            return Temp.get(filePath, creator, purge); // Collision found, try again
        Temp.store[tempPath] = purge;
        const disposer = () => delete Temp.store[tempPath];
        return [tempPath, disposer];
    },
    purge: (filePath) => {
        if (!Temp.store[filePath])
            return;
        delete Temp.store[filePath];
        fs.attempt.unlink(filePath);
    },
    purgeSync: (filePath) => {
        if (!Temp.store[filePath])
            return;
        delete Temp.store[filePath];
        fs.attempt.unlinkSync(filePath);
    },
    purgeSyncAll: () => {
        for (const filePath in Temp.store) {
            Temp.purgeSync(filePath);
        }
    },
    truncate: (filePath) => {
        const basename = path.basename(filePath);
        if (basename.length <= LIMIT_BASENAME_LENGTH)
            return filePath; //FIXME: Rough and quick attempt at detecting ok lengths
        const truncable = /^(\.?)(.*?)((?:\.[^.]+)?(?:\.tmp-\d{10}[a-f0-9]{6})?)$/.exec(basename);
        if (!truncable)
            return filePath; //FIXME: No truncable part detected, can't really do much without also changing the parent path, which is unsafe, hoping for the best here
        const truncationLength = basename.length - LIMIT_BASENAME_LENGTH;
        return `${filePath.slice(0, -basename.length)}${truncable[1]}${truncable[2].slice(0, -truncationLength)}${truncable[3]}`; //FIXME: The truncable part might be shorter than needed here
    }
};
/* INIT */
whenExit(Temp.purgeSyncAll); // Ensuring purgeable temp files are purged on exit
/* EXPORT */
export default Temp;
21  node_modules/atomically/license  generated  vendored  Normal file
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2020-present Fabio Spampinato

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
41  node_modules/atomically/package.json  generated  vendored  Normal file
@@ -0,0 +1,41 @@
{
  "name": "atomically",
  "repository": "github:fabiospampinato/atomically",
  "description": "Read and write files atomically and reliably.",
  "version": "2.0.3",
  "type": "module",
  "main": "dist/index.js",
  "exports": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "scripts": {
    "benchmark": "tsex benchmark",
    "benchmark:watch": "tsex benchmark --watch",
    "clean": "tsex clean",
    "compile": "tsex compile",
    "compile:watch": "tsex compile --watch",
    "test:init": "esbuild --bundle --target=es2020 --platform=node --format=cjs src/index.ts > test/atomically.cjs",
    "test": "npm run test:init && tap --no-check-coverage --no-coverage-report",
    "test:watch": "npm run test:init && tap --no-check-coverage --no-coverage-report --watch",
    "prepublishOnly": "npm run clean && npm run compile && npm run test"
  },
  "keywords": [
    "atomic",
    "read",
    "write",
    "file",
    "reliable"
  ],
  "dependencies": {
    "stubborn-fs": "^1.2.5",
    "when-exit": "^2.1.1"
  },
  "devDependencies": {
    "@types/node": "^20.4.6",
    "esbuild": "^0.18.17",
    "require-inject": "^1.4.4",
    "tap": "^16.3.8",
    "tsex": "^3.0.0",
    "typescript": "^5.1.6",
    "write-file-atomic": "^5.0.1"
  }
}
147  node_modules/atomically/readme.md  generated  vendored  Normal file
@@ -0,0 +1,147 @@
# Atomically

Read and write files atomically and reliably.

## Features

- Overview:
  - This library is a rewrite of [`write-file-atomic`](https://github.com/npm/write-file-atomic), with some important enhancements on top; you can largely use it as a drop-in replacement.
  - This library is written in TypeScript, so types aren't an afterthought but come with the library.
  - This library is slightly faster than [`write-file-atomic`](https://github.com/npm/write-file-atomic), and it can be 10x faster, while being essentially just as safe, by using the `fsyncWait` option.
  - This library has 0 third-party dependencies, so there's less code to vet and the entire thing is roughly 20% smaller than [`write-file-atomic`](https://github.com/npm/write-file-atomic).
  - This library tries harder to write files on disk than [`write-file-atomic`](https://github.com/npm/write-file-atomic) does, by default retrying some failed operations and handling some more errors.
- Reliability:
  - Reads are retried, when appropriate, until they succeed or the timeout is reached.
  - Writes are atomic: first a temporary file containing the new content is written, then that file is renamed to the final path, so it's impossible to end up with a corrupt or partially-written file.
  - Writes happening to the same path are queued, ensuring they don't interfere with each other.
  - Temporary files can be configured not to be purged from disk if the write operation fails, which is useful when keeping the temporary file is better than just losing data.
  - Any needed missing parent folder will be created automatically.
  - Symlinks are resolved automatically.
  - `ENOSYS` errors on `chmod`/`chown` operations are ignored.
  - `EINVAL`/`EPERM` errors on `chmod`/`chown` operations, on POSIX systems where the user is not root, are ignored.
  - `EMFILE`/`ENFILE`/`EAGAIN`/`EBUSY`/`EACCESS`/`EACCES`/`EACCS`/`EPERM` errors happening during necessary operations are caught and the operations are retried until they succeed or the timeout is reached.
  - `ENAMETOOLONG` errors, whether caused by the final path or the temporary path, are worked around by smartly truncating paths.
- Temporary files:
  - By default they are purged automatically once the write operation is completed or if the process exits (cleanly or not).
  - By default they are created by appending a `.tmp-[timestamp][randomness]` suffix to destination paths:
    - The `tmp-` part gives users a hint about the nature of these files, if they happen to see them.
    - The `[timestamp]` part consists of the 10 least significant digits of a milliseconds-precise timestamp, making it likely that if more than one of these files is kept on disk the user will see them in chronological order.
    - The `[randomness]` part consists of 6 random hex characters.
    - If by any chance a collision is found then another suffix is generated.
- Custom options (a combined example follows this list):
  - `chown`: allows you to specify custom group and user ids:
    - by default the old file's ids are copied over.
    - if custom ids are provided they will be used.
    - if `false` the default ids are used.
  - `encoding`: allows you to specify the encoding of the file content:
    - by default when reading, no encoding is specified and a raw buffer is returned.
    - by default when writing, `utf8` is used.
  - `fsync`: allows you to control whether the `fsync` syscall is triggered right after writing the file or not:
    - by default the syscall is triggered immediately after writing the file, increasing the chances that the file will actually be written to disk in case of imminent catastrophic failures, like power outages.
    - if `false` the syscall won't be triggered.
  - `fsyncWait`: allows you to control whether the triggered `fsync` is waited for or not:
    - by default the syscall is waited for.
    - if `false` the syscall will still be triggered but not waited for.
    - this increases performance 10x in some cases, and at the end of the day often there's no plan B if `fsync` fails anyway.
  - `mode`: allows you to specify the mode for the file:
    - by default the old file's mode is copied over.
    - if `false` then `0o666` is used.
  - `schedule`: a function that returns a promise that resolves to a disposer function; basically it allows you to provide some custom queueing logic for the write operation, for example to wire `atomically` into your app's main filesystem job scheduler:
    - even when a custom `schedule` function is provided, write operations will still be queued internally by the library too.
  - `timeout`: the maximum number of milliseconds within which the library will retry some failed operations:
    - when writing asynchronously, by default it will keep retrying for 7500 milliseconds.
    - when writing synchronously, by default it will keep retrying for 1000 milliseconds.
    - if `0` or `-1`, no failed operations will be retried.
    - if another number is provided, that will be the timeout interval.
  - `tmpCreate`: a function that will be used to create a custom temporary file path in place of the default one:
    - even when a custom function is provided, the final temporary path will still be truncated if the library thinks that it may lead to `ENAMETOOLONG` errors.
    - paths by default are truncated in a way that preserves an existing leading dot and trailing extension, if any.
  - `tmpCreated`: a function that will be called with the newly created temporary file path.
  - `tmpPurge`: allows you to control whether the temporary file will be purged from the filesystem or not if the write fails:
    - by default it will be purged.
    - if `false` it will be kept on disk.
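A minimal combined sketch of these options (the destination path and the custom temp-path creator here are made up for illustration; `tmpCreate` roughly mirrors the default `.tmp-[timestamp][randomness]` scheme described above):

```ts
import {writeFile} from 'atomically';

await writeFile ( '/data/settings.json', JSON.stringify ( { theme: 'dark' } ), {
  encoding: 'utf8', // Write the string as UTF-8 (also the default)
  timeout: 3000, // Retry transient failures for up to 3 seconds
  tmpPurge: false, // Keep the temporary file on disk if the write fails
  tmpCreate: filePath => `${filePath}.tmp-${Date.now ()}`, // Custom temporary path, still truncated by the library if too long
  tmpCreated: tempPath => console.log ( 'Writing via', tempPath ) // Observe the temporary path actually used
} );
```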

## Install

```sh
npm install --save atomically
```

## Usage

This is the shape of the optional options object:

```ts
type Disposer = () => void;

type ReadOptions = string | {
  encoding?: string | null,
  mode?: string | number | false,
  timeout?: number
};

type WriteOptions = string | {
  chown?: { gid: number, uid: number } | false,
  encoding?: string | null,
  fsync?: boolean,
  fsyncWait?: boolean,
  mode?: string | number | false,
  schedule?: ( filePath: string ) => Promise<Disposer>,
  timeout?: number,
  tmpCreate?: ( filePath: string ) => string,
  tmpCreated?: ( filePath: string ) => any,
  tmpPurge?: boolean
};
```

This is the shape of the provided functions:

```ts
function readFile ( filePath: string, options?: ReadOptions ): Promise<Buffer | string>;
function readFileSync ( filePath: string, options?: ReadOptions ): Buffer | string;
function writeFile ( filePath: string, data: Buffer | string | undefined, options?: WriteOptions ): Promise<void>;
function writeFileSync ( filePath: string, data: Buffer | string | undefined, options?: WriteOptions ): void;
```
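For drop-in compatibility with callback-style call sites, `writeFile` also accepts a Node-style callback, per the overloads declared in `dist/index.d.ts`; a minimal sketch (made-up path):

```ts
import {writeFile} from 'atomically';

writeFile ( '/foo.txt', 'my_data', { fsync: false }, error => {
  if ( error ) console.error ( 'Write failed:', error );
} );
```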

This is how to use the library:

```ts
import {readFile, readFileSync, writeFile, writeFileSync} from 'atomically';

// Asynchronous read with default options
const buffer = await readFile ( '/foo.txt' );

// Synchronous read assuming the encoding is "utf8"
const string = readFileSync ( '/foo.txt', 'utf8' );

// Asynchronous write with default options
await writeFile ( '/foo.txt', 'my_data' );

// Asynchronous write that doesn't prod the old file for a stat object at all
await writeFile ( '/foo.txt', 'my_data', { chown: false, mode: false } );

// 10x faster asynchronous write that's less resilient against imminent catastrophes
await writeFile ( '/foo.txt', 'my_data', { fsync: false } );

// 10x faster asynchronous write that's essentially still as resilient against imminent catastrophes
await writeFile ( '/foo.txt', 'my_data', { fsyncWait: false } );

// Asynchronous write with a custom schedule function
await writeFile ( '/foo.txt', 'my_data', {
  schedule: filePath => {
    return new Promise ( resolve => { // Once this returned promise resolves, the write operation will begin
      MyScheduler.schedule ( filePath, () => { // Hypothetical scheduler function that will eventually tell us to go on with this write operation
        const disposer = () => {}; // Hypothetical function containing any clean-up logic, called after the write operation has completed (successfully or not)
        resolve ( disposer ); // Resolving the promise with a disposer, beginning the write operation
      })
    });
  }
});

// Synchronous write with default options
writeFileSync ( '/foo.txt', 'my_data' );
```

## License

MIT © Fabio Spampinato
39  node_modules/atomically/src/constants.ts  generated  vendored  Normal file
@@ -0,0 +1,39 @@
|
||||
|
||||
/* IMPORT */
|
||||
|
||||
import os from 'node:os';
|
||||
import process from 'node:process';
|
||||
|
||||
/* MAIN */
|
||||
|
||||
const DEFAULT_ENCODING = 'utf8';
|
||||
|
||||
const DEFAULT_FILE_MODE = 0o666;
|
||||
|
||||
const DEFAULT_FOLDER_MODE = 0o777;
|
||||
|
||||
const DEFAULT_READ_OPTIONS = {};
|
||||
|
||||
const DEFAULT_WRITE_OPTIONS = {};
|
||||
|
||||
const DEFAULT_USER_UID = os.userInfo ().uid;
|
||||
|
||||
const DEFAULT_USER_GID = os.userInfo ().gid;
|
||||
|
||||
const DEFAULT_TIMEOUT_ASYNC = 7500;
|
||||
|
||||
const DEFAULT_TIMEOUT_SYNC = 1000;
|
||||
|
||||
const IS_POSIX = !!process.getuid;
|
||||
|
||||
const IS_USER_ROOT = process.getuid ? !process.getuid () : false;
|
||||
|
||||
const LIMIT_BASENAME_LENGTH = 128; //TODO: Fetch the real limit from the filesystem //TODO: Fetch the whole-path length limit too
|
||||
|
||||
const LIMIT_FILES_DESCRIPTORS = 10_000; //TODO: Fetch the real limit from the filesystem
|
||||
|
||||
const NOOP = () => {};
|
||||
|
||||
/* EXPORT */
|
||||
|
||||
export {DEFAULT_ENCODING, DEFAULT_FILE_MODE, DEFAULT_FOLDER_MODE, DEFAULT_READ_OPTIONS, DEFAULT_WRITE_OPTIONS, DEFAULT_USER_UID, DEFAULT_USER_GID, DEFAULT_TIMEOUT_ASYNC, DEFAULT_TIMEOUT_SYNC, IS_POSIX, IS_USER_ROOT, LIMIT_BASENAME_LENGTH, LIMIT_FILES_DESCRIPTORS, NOOP};
|
||||
331  node_modules/atomically/src/index.ts  generated  vendored  Normal file
@@ -0,0 +1,331 @@
|
||||
|
||||
/* IMPORT */
|
||||
|
||||
import path from 'node:path';
|
||||
import fs from 'stubborn-fs';
|
||||
import {DEFAULT_ENCODING, DEFAULT_FILE_MODE, DEFAULT_FOLDER_MODE, DEFAULT_READ_OPTIONS, DEFAULT_WRITE_OPTIONS, DEFAULT_USER_UID, DEFAULT_USER_GID, DEFAULT_TIMEOUT_ASYNC, DEFAULT_TIMEOUT_SYNC, IS_POSIX} from './constants';
|
||||
import {isException, isFunction, isString, isUndefined} from './utils/lang';
|
||||
import Scheduler from './utils/scheduler';
|
||||
import Temp from './utils/temp';
|
||||
import type {Callback, Data, Disposer, Encoding, Path, ReadOptions, WriteOptions} from './types';
|
||||
|
||||
/* MAIN */
|
||||
|
||||
function readFile ( filePath: Path, options: Encoding | ReadOptions & { encoding: string } ): Promise<string>;
|
||||
function readFile ( filePath: Path, options?: ReadOptions ): Promise<Buffer>;
|
||||
function readFile ( filePath: Path, options: Encoding | ReadOptions = DEFAULT_READ_OPTIONS ): Promise<Buffer | string> {
|
||||
|
||||
if ( isString ( options ) ) return readFile ( filePath, { encoding: options } );
|
||||
|
||||
const timeout = Date.now () + ( ( options.timeout ?? DEFAULT_TIMEOUT_ASYNC ) || -1 );
|
||||
|
||||
return fs.retry.readFile ( timeout )( filePath, options );
|
||||
|
||||
}
|
||||
|
||||
function readFileSync ( filePath: Path, options: Encoding | ReadOptions & { encoding: string } ): string;
|
||||
function readFileSync ( filePath: Path, options?: ReadOptions ): Buffer;
|
||||
function readFileSync ( filePath: Path, options: Encoding | ReadOptions = DEFAULT_READ_OPTIONS ): Buffer | string {
|
||||
|
||||
if ( isString ( options ) ) return readFileSync ( filePath, { encoding: options } );
|
||||
|
||||
const timeout = Date.now () + ( ( options.timeout ?? DEFAULT_TIMEOUT_SYNC ) || -1 );
|
||||
|
||||
return fs.retry.readFileSync ( timeout )( filePath, options );
|
||||
|
||||
}
|
||||
|
||||
function writeFile ( filePath: Path, data: Data, callback?: Callback ): Promise<void>;
|
||||
function writeFile ( filePath: Path, data: Data, options?: Encoding | WriteOptions, callback?: Callback ): Promise<void>;
|
||||
function writeFile ( filePath: Path, data: Data, options?: Encoding | WriteOptions | Callback, callback?: Callback ): Promise<void> {
|
||||
|
||||
if ( isFunction ( options ) ) return writeFile ( filePath, data, DEFAULT_WRITE_OPTIONS, options );
|
||||
|
||||
const promise = writeFileAsync ( filePath, data, options );
|
||||
|
||||
if ( callback ) promise.then ( callback, callback );
|
||||
|
||||
return promise;
|
||||
|
||||
}
|
||||
|
||||
async function writeFileAsync ( filePath: Path, data: Data, options: Encoding | WriteOptions = DEFAULT_WRITE_OPTIONS ): Promise<void> {
|
||||
|
||||
if ( isString ( options ) ) return writeFileAsync ( filePath, data, { encoding: options } );
|
||||
|
||||
const timeout = Date.now () + ( ( options.timeout ?? DEFAULT_TIMEOUT_ASYNC ) || -1 );
|
||||
|
||||
let schedulerCustomDisposer: Disposer | null = null;
|
||||
let schedulerDisposer: Disposer | null = null;
|
||||
let tempDisposer: Disposer | null = null;
|
||||
let tempPath: string | null = null;
|
||||
let fd: number | null = null;
|
||||
|
||||
try {
|
||||
|
||||
if ( options.schedule ) schedulerCustomDisposer = await options.schedule ( filePath );
|
||||
|
||||
schedulerDisposer = await Scheduler.schedule ( filePath );
|
||||
|
||||
const filePathReal = await fs.attempt.realpath ( filePath );
|
||||
const filePathExists = !!filePathReal;
|
||||
|
||||
filePath = filePathReal || filePath;
|
||||
|
||||
[tempPath, tempDisposer] = Temp.get ( filePath, options.tmpCreate || Temp.create, !( options.tmpPurge === false ) );
|
||||
|
||||
const useStatChown = IS_POSIX && isUndefined ( options.chown );
|
||||
const useStatMode = isUndefined ( options.mode );
|
||||
|
||||
if ( filePathExists && ( useStatChown || useStatMode ) ) {
|
||||
|
||||
const stats = await fs.attempt.stat ( filePath );
|
||||
|
||||
if ( stats ) {
|
||||
|
||||
options = { ...options };
|
||||
|
||||
if ( useStatChown ) {
|
||||
|
||||
options.chown = { uid: stats.uid, gid: stats.gid };
|
||||
|
||||
}
|
||||
|
||||
if ( useStatMode ) {
|
||||
|
||||
options.mode = stats.mode;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if ( !filePathExists ) {
|
||||
|
||||
const parentPath = path.dirname ( filePath );
|
||||
|
||||
await fs.attempt.mkdir ( parentPath, {
|
||||
mode: DEFAULT_FOLDER_MODE,
|
||||
recursive: true
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
fd = await fs.retry.open ( timeout )( tempPath, 'w', options.mode || DEFAULT_FILE_MODE );
|
||||
|
||||
if ( options.tmpCreated ) {
|
||||
|
||||
options.tmpCreated ( tempPath );
|
||||
|
||||
}
|
||||
|
||||
if ( isString ( data ) ) {
|
||||
|
||||
await fs.retry.write ( timeout )( fd, data, 0, options.encoding || DEFAULT_ENCODING );
|
||||
|
||||
} else if ( !isUndefined ( data ) ) {
|
||||
|
||||
await fs.retry.write ( timeout )( fd, data, 0, data.length, 0 );
|
||||
|
||||
}
|
||||
|
||||
if ( options.fsync !== false ) {
|
||||
|
||||
if ( options.fsyncWait !== false ) {
|
||||
|
||||
await fs.retry.fsync ( timeout )( fd );
|
||||
|
||||
} else {
|
||||
|
||||
fs.attempt.fsync ( fd );
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
await fs.retry.close ( timeout )( fd );
|
||||
|
||||
fd = null;
|
||||
|
||||
if ( options.chown && ( options.chown.uid !== DEFAULT_USER_UID || options.chown.gid !== DEFAULT_USER_GID ) ) {
|
||||
|
||||
await fs.attempt.chown ( tempPath, options.chown.uid, options.chown.gid );
|
||||
|
||||
}
|
||||
|
||||
if ( options.mode && options.mode !== DEFAULT_FILE_MODE ) {
|
||||
|
||||
await fs.attempt.chmod ( tempPath, options.mode );
|
||||
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
await fs.retry.rename ( timeout )( tempPath, filePath );
|
||||
|
||||
} catch ( error: unknown ) {
|
||||
|
||||
if ( !isException ( error ) ) throw error;
|
||||
|
||||
if ( error.code !== 'ENAMETOOLONG' ) throw error;
|
||||
|
||||
await fs.retry.rename ( timeout )( tempPath, Temp.truncate ( filePath ) );
|
||||
|
||||
}
|
||||
|
||||
tempDisposer ();
|
||||
|
||||
tempPath = null;
|
||||
|
||||
} finally {
|
||||
|
||||
if ( fd ) await fs.attempt.close ( fd );
|
||||
|
||||
if ( tempPath ) Temp.purge ( tempPath );
|
||||
|
||||
if ( schedulerCustomDisposer ) schedulerCustomDisposer ();
|
||||
|
||||
if ( schedulerDisposer ) schedulerDisposer ();
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function writeFileSync ( filePath: Path, data: Data, options: Encoding | WriteOptions = DEFAULT_WRITE_OPTIONS ): void {
|
||||
|
||||
if ( isString ( options ) ) return writeFileSync ( filePath, data, { encoding: options } );
|
||||
|
||||
const timeout = Date.now () + ( ( options.timeout ?? DEFAULT_TIMEOUT_SYNC ) || -1 );
|
||||
|
||||
let tempDisposer: Disposer | null = null;
|
||||
let tempPath: string | null = null;
|
||||
let fd: number | null = null;
|
||||
|
||||
try {
|
||||
|
||||
const filePathReal = fs.attempt.realpathSync ( filePath );
|
||||
const filePathExists = !!filePathReal;
|
||||
|
||||
filePath = filePathReal || filePath;
|
||||
|
||||
[tempPath, tempDisposer] = Temp.get ( filePath, options.tmpCreate || Temp.create, !( options.tmpPurge === false ) );
|
||||
|
||||
const useStatChown = IS_POSIX && isUndefined ( options.chown );
|
||||
const useStatMode = isUndefined ( options.mode );
|
||||
|
||||
if ( filePathExists && ( useStatChown || useStatMode ) ) {
|
||||
|
||||
const stats = fs.attempt.statSync ( filePath );
|
||||
|
||||
if ( stats ) {
|
||||
|
||||
options = { ...options };
|
||||
|
||||
if ( useStatChown ) {
|
||||
|
||||
options.chown = { uid: stats.uid, gid: stats.gid };
|
||||
|
||||
}
|
||||
|
||||
if ( useStatMode ) {
|
||||
|
||||
options.mode = stats.mode;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if ( !filePathExists ) {
|
||||
|
||||
const parentPath = path.dirname ( filePath );
|
||||
|
||||
fs.attempt.mkdirSync ( parentPath, {
|
||||
mode: DEFAULT_FOLDER_MODE,
|
||||
recursive: true
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
fd = fs.retry.openSync ( timeout )( tempPath, 'w', options.mode || DEFAULT_FILE_MODE );
|
||||
|
||||
if ( options.tmpCreated ) {
|
||||
|
||||
options.tmpCreated ( tempPath );
|
||||
|
||||
}
|
||||
|
||||
if ( isString ( data ) ) {
|
||||
|
||||
fs.retry.writeSync ( timeout )( fd, data, 0, options.encoding || DEFAULT_ENCODING );
|
||||
|
||||
} else if ( !isUndefined ( data ) ) {
|
||||
|
||||
fs.retry.writeSync ( timeout )( fd, data, 0, data.length, 0 );
|
||||
|
||||
}
|
||||
|
||||
if ( options.fsync !== false ) {
|
||||
|
||||
if ( options.fsyncWait !== false ) {
|
||||
|
||||
fs.retry.fsyncSync ( timeout )( fd );
|
||||
|
||||
} else {
|
||||
|
||||
fs.attempt.fsync ( fd );
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
fs.retry.closeSync ( timeout )( fd );
|
||||
|
||||
fd = null;
|
||||
|
||||
if ( options.chown && ( options.chown.uid !== DEFAULT_USER_UID || options.chown.gid !== DEFAULT_USER_GID ) ) {
|
||||
|
||||
fs.attempt.chownSync ( tempPath, options.chown.uid, options.chown.gid );
|
||||
|
||||
}
|
||||
|
||||
if ( options.mode && options.mode !== DEFAULT_FILE_MODE ) {
|
||||
|
||||
fs.attempt.chmodSync ( tempPath, options.mode );
|
||||
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
fs.retry.renameSync ( timeout )( tempPath, filePath );
|
||||
|
||||
} catch ( error: unknown ) {
|
||||
|
||||
if ( !isException ( error ) ) throw error;
|
||||
|
||||
if ( error.code !== 'ENAMETOOLONG' ) throw error;
|
||||
|
||||
fs.retry.renameSync ( timeout )( tempPath, Temp.truncate ( filePath ) );
|
||||
|
||||
}
|
||||
|
||||
tempDisposer ();
|
||||
|
||||
tempPath = null;
|
||||
|
||||
} finally {
|
||||
|
||||
if ( fd ) fs.attempt.closeSync ( fd );
|
||||
|
||||
if ( tempPath ) Temp.purge ( tempPath );
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/* EXPORT */
|
||||
|
||||
export {readFile, readFileSync, writeFile, writeFileSync};
|
||||
export type {Encoding, ReadOptions, WriteOptions};
|
||||
37  node_modules/atomically/src/types.ts  generated  vendored  Normal file
@@ -0,0 +1,37 @@
|
||||
|
||||
/* MAIN */
|
||||
|
||||
type Callback = ( error: Exception | void ) => void;
|
||||
|
||||
type Data = Uint8Array | string | undefined;
|
||||
|
||||
type Disposer = () => void;
|
||||
|
||||
type Encoding = 'ascii' | 'base64' | 'binary' | 'hex' | 'latin1' | 'utf8' | 'utf-8' | 'utf16le' | 'ucs2' | 'ucs-2';
|
||||
|
||||
type Exception = NodeJS.ErrnoException;
|
||||
|
||||
type Path = string;
|
||||
|
||||
type ReadOptions = {
|
||||
encoding?: Encoding | null,
|
||||
mode?: string | number | false,
|
||||
timeout?: number
|
||||
};
|
||||
|
||||
type WriteOptions = {
|
||||
chown?: { gid: number, uid: number } | false,
|
||||
encoding?: Encoding | null,
|
||||
fsync?: boolean,
|
||||
fsyncWait?: boolean,
|
||||
mode?: string | number | false,
|
||||
schedule?: ( filePath: string ) => Promise<Disposer>,
|
||||
timeout?: number,
|
||||
tmpCreate?: ( filePath: string ) => string,
|
||||
tmpCreated?: ( filePath: string ) => void,
|
||||
tmpPurge?: boolean
|
||||
};
|
||||
|
||||
/* EXPORT */
|
||||
|
||||
export type {Callback, Data, Disposer, Encoding, Exception, Path, ReadOptions, WriteOptions};
|
||||
34  node_modules/atomically/src/utils/lang.ts  generated  vendored  Normal file
@@ -0,0 +1,34 @@
|
||||
|
||||
/* IMPORT */
|
||||
|
||||
import type {Exception} from '../types';
|
||||
|
||||
/* MAIN */
|
||||
|
||||
const isException = ( value: unknown ): value is Exception => {
|
||||
|
||||
return ( value instanceof Error ) && ( 'code' in value );
|
||||
|
||||
};
|
||||
|
||||
const isFunction = ( value: unknown ): value is Function => {
|
||||
|
||||
return ( typeof value === 'function' );
|
||||
|
||||
};
|
||||
|
||||
const isString = ( value: unknown ): value is string => {
|
||||
|
||||
return ( typeof value === 'string' );
|
||||
|
||||
};
|
||||
|
||||
const isUndefined = ( value: unknown ): value is undefined => {
|
||||
|
||||
return ( value === undefined );
|
||||
|
||||
};
|
||||
|
||||
/* EXPORT */
|
||||
|
||||
export {isException, isFunction, isString, isUndefined};
|
||||
62  node_modules/atomically/src/utils/scheduler.ts  generated  vendored  Normal file
@@ -0,0 +1,62 @@
|
||||
|
||||
/* IMPORT */
|
||||
|
||||
import type {Disposer} from '../types';
|
||||
|
||||
/* HELPERS */
|
||||
|
||||
const Queues: Record<string, Function[] | undefined> = {};
|
||||
|
||||
/* MAIN */
|
||||
|
||||
//TODO: Maybe publish this as a standalone package
|
||||
|
||||
const Scheduler = {
|
||||
|
||||
/* API */
|
||||
|
||||
next: ( id: string ): void => {
|
||||
|
||||
const queue = Queues[id];
|
||||
|
||||
if ( !queue ) return;
|
||||
|
||||
queue.shift ();
|
||||
|
||||
const job = queue[0];
|
||||
|
||||
if ( job ) {
|
||||
|
||||
job ( () => Scheduler.next ( id ) );
|
||||
|
||||
} else {
|
||||
|
||||
delete Queues[id];
|
||||
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
schedule: ( id: string ): Promise<Disposer> => {
|
||||
|
||||
return new Promise ( resolve => {
|
||||
|
||||
let queue = Queues[id];
|
||||
|
||||
if ( !queue ) queue = Queues[id] = [];
|
||||
|
||||
queue.push ( resolve );
|
||||
|
||||
if ( queue.length > 1 ) return;
|
||||
|
||||
resolve ( () => Scheduler.next ( id ) );
|
||||
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
/* EXPORT */
|
||||
|
||||
export default Scheduler;
|
||||
102  node_modules/atomically/src/utils/temp.ts  generated  vendored  Normal file
@@ -0,0 +1,102 @@
|
||||
|
||||
/* IMPORT */
|
||||
|
||||
import path from 'node:path';
|
||||
import fs from 'stubborn-fs';
|
||||
import whenExit from 'when-exit';
|
||||
import {LIMIT_BASENAME_LENGTH} from '../constants';
|
||||
import type {Disposer} from '../types';
|
||||
|
||||
/* MAIN */
|
||||
|
||||
//TODO: Maybe publish this as a standalone package
|
||||
|
||||
const Temp = {
|
||||
|
||||
/* VARIABLES */
|
||||
|
||||
store: <Record<string, boolean>> {}, // filePath => purge
|
||||
|
||||
/* API */
|
||||
|
||||
create: ( filePath: string ): string => {
|
||||
|
||||
const randomness = `000000${Math.floor ( Math.random () * 16777215 ).toString ( 16 )}`.slice ( -6 ); // 6 random-enough hex characters
|
||||
const timestamp = Date.now ().toString ().slice ( -10 ); // 10 precise timestamp digits
|
||||
const prefix = 'tmp-';
|
||||
const suffix = `.${prefix}${timestamp}${randomness}`;
|
||||
const tempPath = `${filePath}${suffix}`;
|
||||
|
||||
return tempPath;
|
||||
|
||||
},
|
||||
|
||||
get: ( filePath: string, creator: ( filePath: string ) => string, purge: boolean = true ): [string, Disposer] => {
|
||||
|
||||
const tempPath = Temp.truncate ( creator ( filePath ) );
|
||||
|
||||
if ( tempPath in Temp.store ) return Temp.get ( filePath, creator, purge ); // Collision found, try again
|
||||
|
||||
Temp.store[tempPath] = purge;
|
||||
|
||||
const disposer = () => delete Temp.store[tempPath];
|
||||
|
||||
return [tempPath, disposer];
|
||||
|
||||
},
|
||||
|
||||
purge: ( filePath: string ): void => {
|
||||
|
||||
if ( !Temp.store[filePath] ) return;
|
||||
|
||||
delete Temp.store[filePath];
|
||||
|
||||
fs.attempt.unlink ( filePath );
|
||||
|
||||
},
|
||||
|
||||
purgeSync: ( filePath: string ): void => {
|
||||
|
||||
if ( !Temp.store[filePath] ) return;
|
||||
|
||||
delete Temp.store[filePath];
|
||||
|
||||
fs.attempt.unlinkSync ( filePath );
|
||||
|
||||
},
|
||||
|
||||
purgeSyncAll: (): void => {
|
||||
|
||||
for ( const filePath in Temp.store ) {
|
||||
|
||||
Temp.purgeSync ( filePath );
|
||||
|
||||
}
|
||||
|
||||
},
|
||||
|
||||
truncate: ( filePath: string ): string => { // Truncating paths to avoid getting an "ENAMETOOLONG" error //FIXME: This doesn't really always work, the actual filesystem limits must be detected for this to be implemented correctly
|
||||
|
||||
const basename = path.basename ( filePath );
|
||||
|
||||
if ( basename.length <= LIMIT_BASENAME_LENGTH ) return filePath; //FIXME: Rough and quick attempt at detecting ok lengths
|
||||
|
||||
const truncable = /^(\.?)(.*?)((?:\.[^.]+)?(?:\.tmp-\d{10}[a-f0-9]{6})?)$/.exec ( basename );
|
||||
|
||||
if ( !truncable ) return filePath; //FIXME: No truncable part detected, can't really do much without also changing the parent path, which is unsafe, hoping for the best here
|
||||
|
||||
const truncationLength = basename.length - LIMIT_BASENAME_LENGTH;
|
||||
|
||||
return `${filePath.slice ( 0, - basename.length )}${truncable[1]}${truncable[2].slice ( 0, - truncationLength )}${truncable[3]}`; //FIXME: The truncable part might be shorter than needed here
|
||||
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
/* INIT */
|
||||
|
||||
whenExit ( Temp.purgeSyncAll ); // Ensuring purgeable temp files are purged on exit
|
||||
|
||||
/* EXPORT */
|
||||
|
||||
export default Temp;
|
||||
76  node_modules/atomically/tasks/benchmark.js  generated  vendored  Normal file
@@ -0,0 +1,76 @@
|
||||
|
||||
/* IMPORT */
|
||||
|
||||
import {randomUUID} from 'node:crypto';
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import {setTimeout as delay} from 'node:timers/promises';
|
||||
import writeFileAtomic from 'write-file-atomic';
|
||||
import {writeFile, writeFileSync} from '../dist/index.js';
|
||||
|
||||
/* MAIN */
|
||||
|
||||
const TEMP = os.tmpdir ();
|
||||
const UUID = randomUUID ();
|
||||
const DST = i => path.join ( TEMP, `atomically-${UUID}-temp-${i}.txt` );
|
||||
const ITERATIONS = 250;
|
||||
|
||||
const runSingleAsync = async ( name, fn, buffer, options ) => {
|
||||
console.time ( name );
|
||||
for ( let i = 0; i < ITERATIONS; i++ ) {
|
||||
await fn ( DST ( i ), buffer, options );
|
||||
}
|
||||
console.timeEnd ( name );
|
||||
await delay ( 1000 );
|
||||
};
|
||||
|
||||
const runSingleSync = async ( name, fn, buffer, options ) => {
|
||||
console.time ( name );
|
||||
for ( let i = 0; i < ITERATIONS; i++ ) {
|
||||
fn ( DST ( i ), buffer, options );
|
||||
}
|
||||
console.timeEnd ( name );
|
||||
await delay ( 1000 );
|
||||
};
|
||||
|
||||
const runAllDummy = () => { // Preparation run
|
||||
runSingleSync ( 'dummy', fs.writeFileSync, '' );
|
||||
};
|
||||
|
||||
const runAllAsync = async ( name, buffer ) => {
|
||||
await runSingleAsync ( `${name} -> async -> write-file-atomic`, writeFileAtomic, buffer, { mode: 0o666 } );
|
||||
await runSingleAsync ( `${name} -> async -> write-file-atomic (faster)`, writeFileAtomic, buffer );
|
||||
await runSingleAsync ( `${name} -> async -> write-file-atomic (fastest)`, writeFileAtomic, buffer, { fsync: false } );
|
||||
await runSingleAsync ( `${name} -> async -> atomically`, writeFile, buffer );
|
||||
await runSingleAsync ( `${name} -> async -> atomically (faster)`, writeFile, buffer, { mode: false, chown: false, fsyncWait: false } );
|
||||
await runSingleAsync ( `${name} -> async -> atomically (fastest)`, writeFile, buffer, { mode: false, chown: false, fsync: false } );
|
||||
};
|
||||
|
||||
const runAllSync = ( name, buffer ) => {
|
||||
runSingleSync ( `${name} -> sync -> write-file-atomic`, writeFileAtomic.sync, buffer, { mode: 0o666 } );
|
||||
runSingleSync ( `${name} -> sync -> write-file-atomic (faster)`, writeFileAtomic.sync, buffer );
|
||||
runSingleSync ( `${name} -> sync -> write-file-atomic (fastest)`, writeFileAtomic.sync, buffer, { fsync: false } );
|
||||
runSingleSync ( `${name} -> sync -> atomically`, writeFileSync, buffer );
|
||||
runSingleSync ( `${name} -> sync -> atomically (faster)`, writeFileSync, buffer, { mode: false, chown: false, fsyncWait: false } );
|
||||
runSingleSync ( `${name} -> sync -> atomically (fastest)`, writeFileSync, buffer, { mode: false, chown: false, fsync: false } );
|
||||
};
|
||||
|
||||
const runAll = async ( name, buffer ) => {
|
||||
await runAllAsync ( name, buffer );
|
||||
console.log ( '-------------------' );
|
||||
runAllSync ( name, buffer );
|
||||
};
|
||||
|
||||
const run = async () => {
|
||||
runAllDummy ();
|
||||
console.log ( '===================' );
|
||||
await runAll ( '100kb', Buffer.allocUnsafe ( 100 * 1024 ) );
|
||||
console.log ( '===================' );
|
||||
await runAll ( '10kb', Buffer.allocUnsafe ( 10 * 1024 ) );
|
||||
console.log ( '===================' );
|
||||
await runAll ( '1kb', Buffer.allocUnsafe ( 1024 ) );
|
||||
console.log ( '===================' );
|
||||
};
|
||||
|
||||
run ();
|
||||
508  node_modules/atomically/test/basic.cjs  generated  vendored  Normal file
@@ -0,0 +1,508 @@
|
||||
process.setMaxListeners(1000000);
|
||||
|
||||
const fs = require('fs')
|
||||
const os = require('os')
|
||||
const path = require('path')
|
||||
const {test} = require('tap')
|
||||
const requireInject = require('require-inject')
|
||||
|
||||
let expectClose = 0
|
||||
let closeCalled = 0
|
||||
let expectCloseSync = 0
|
||||
let closeSyncCalled = 0
|
||||
const createErr = code => Object.assign(new Error(code), { code })
|
||||
|
||||
let unlinked = []
|
||||
|
||||
const fsMock = Object.assign ( {}, fs, {
  /* ASYNC */
  mkdir (filename, opts, cb) {
    return cb(null);
  },
  realpath (filename, cb) {
    return cb(null, filename)
  },
  open (tmpfile, options, mode, cb) {
    if (/noopen/.test(tmpfile)) return cb(createErr('ENOOPEN'))
    expectClose++
    cb(null, tmpfile)
  },
  write (fd) {
    const cb = arguments[arguments.length - 1]
    if (/nowrite/.test(fd)) return cb(createErr('ENOWRITE'))
    cb()
  },
  fsync (fd, cb) {
    if (/nofsync/.test(fd)) return cb(createErr('ENOFSYNC'))
    cb()
  },
  close (fd, cb) {
    closeCalled++
    cb()
  },
  chown (tmpfile, uid, gid, cb) {
    if (/nochown/.test(tmpfile)) return cb(createErr('ENOCHOWN'))
    if (/enosys/.test(tmpfile)) return cb(createErr('ENOSYS'))
    if (/einval/.test(tmpfile)) return cb(createErr('EINVAL'))
    if (/eperm/.test(tmpfile)) return cb(createErr('EPERM'))
    cb()
  },
  chmod (tmpfile, mode, cb) {
    if (/nochmod/.test(tmpfile)) return cb(createErr('ENOCHMOD'))
    if (/enosys/.test(tmpfile)) return cb(createErr('ENOSYS'))
    if (/eperm/.test(tmpfile)) return cb(createErr('EPERM'))
    if (/einval/.test(tmpfile)) return cb(createErr('EINVAL'))
    cb()
  },
  rename (tmpfile, filename, cb) {
    if (/norename/.test(tmpfile)) return cb(createErr('ENORENAME'))
    cb()
  },
  unlink (tmpfile, cb) {
    if (/nounlink/.test(tmpfile)) return cb(createErr('ENOUNLINK'))
    cb()
  },
  stat (tmpfile, cb) {
    if (/nostat/.test(tmpfile)) return cb(createErr('ENOSTAT'))
    if (/statful/.test(tmpfile)) return cb(null, fs.statSync('/'));
    cb()
  },
  /* SYNC */
  mkdirSync (filename) {},
  realpathSync (filename, cb) {
    return filename
  },
  openSync (tmpfile, options) {
    if (/noopen/.test(tmpfile)) throw createErr('ENOOPEN')
    expectCloseSync++
    return tmpfile
  },
  writeSync (fd) {
    if (/nowrite/.test(fd)) throw createErr('ENOWRITE')
  },
  fsyncSync (fd) {
    if (/nofsync/.test(fd)) throw createErr('ENOFSYNC')
  },
  closeSync (fd) {
    closeSyncCalled++
  },
  chownSync (tmpfile, uid, gid) {
    if (/nochown/.test(tmpfile)) throw createErr('ENOCHOWN')
    if (/enosys/.test(tmpfile)) throw createErr('ENOSYS')
    if (/einval/.test(tmpfile)) throw createErr('EINVAL')
    if (/eperm/.test(tmpfile)) throw createErr('EPERM')
  },
  chmodSync (tmpfile, mode) {
    if (/nochmod/.test(tmpfile)) throw createErr('ENOCHMOD')
    if (/enosys/.test(tmpfile)) throw createErr('ENOSYS')
    if (/einval/.test(tmpfile)) throw createErr('EINVAL')
    if (/eperm/.test(tmpfile)) throw createErr('EPERM')
  },
  renameSync (tmpfile, filename) {
    if (/norename/.test(tmpfile)) throw createErr('ENORENAME')
  },
  unlinkSync (tmpfile) {
    if (/nounlink/.test(tmpfile)) throw createErr('ENOUNLINK')
    unlinked.push(tmpfile)
  },
  statSync (tmpfile) {
    if (/nostat/.test(tmpfile)) throw createErr('ENOSTAT')
    if (/statful/.test(tmpfile)) return fs.statSync('/');
  }
});
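// Factories for "unstable" fs functions: roughly 75% of calls fail with a randomly
// picked retryable error code, which exercises atomically's retry/timeout handling.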
const makeUnstableAsyncFn = function () {
  return function () {
    if ( Math.random () <= .75 ) {
      const code = ['EMFILE', 'ENFILE', 'EAGAIN', 'EBUSY', 'EACCESS', 'EPERM'].sort ( () => Math.random () - .5 )[0];
      throw createErr ( code );
    }
    return arguments[arguments.length -1](null, arguments[0]);
  };
};

const makeUnstableSyncFn = function ( fn ) {
  return function () {
    if ( Math.random () <= .75 ) {
      const code = ['EMFILE', 'ENFILE', 'EAGAIN', 'EBUSY', 'EACCESS', 'EPERM'].sort ( () => Math.random () - .5 )[0];
      throw createErr ( code );
    }
    return fn.apply(undefined, arguments)
  };
};

const fsMockUnstable = Object.assign ( {}, fsMock, {
  open: makeUnstableAsyncFn (),
  write: makeUnstableAsyncFn (),
  fsync: makeUnstableAsyncFn (),
  close: makeUnstableAsyncFn (),
  rename: makeUnstableAsyncFn (),
  openSync: makeUnstableSyncFn ( x => x ),
  writeSync: makeUnstableSyncFn ( () => {} ),
  fsyncSync: makeUnstableSyncFn ( () => {} ),
  closeSync: makeUnstableSyncFn ( () => {} ),
  renameSync: makeUnstableSyncFn ( () => {} )
});

const {writeFile: writeFileAtomic, writeFileSync: writeFileAtomicSync} = requireInject('./atomically.cjs', { fs: fsMock });
test('async tests', t => {
  t.plan(2)

  expectClose = 0
  closeCalled = 0
  t.teardown(() => {
    t.parent.equal(closeCalled, expectClose, 'async tests closed all files')
    expectClose = 0
    closeCalled = 0
  })

  t.test('non-root tests', t => {
    t.plan(28)

    writeFileAtomic('good', 'test', { mode: '0777' }, err => {
      t.notOk(err, 'No errors occur when passing in options')
    })
    writeFileAtomic('good', 'test', 'utf8', err => {
      t.notOk(err, 'No errors occur when passing in options as string')
    })
    writeFileAtomic('good', 'test', undefined, err => {
      t.notOk(err, 'No errors occur when NOT passing in options')
    })
    writeFileAtomic('good', 'test', err => {
      t.notOk(err)
    })
    writeFileAtomic('noopen', 'test', err => {
      t.equal(err.message, 'ENOOPEN', 'fs.open failures propagate')
    })
    writeFileAtomic('nowrite', 'test', err => {
      t.equal(err.message, 'ENOWRITE', 'fs.write failures propagate')
    })
    writeFileAtomic('nowrite', Buffer.from('test', 'utf8'), err => {
      t.equal(err.message, 'ENOWRITE', 'fs.write failures propagate for buffers')
    })
    writeFileAtomic('nochown', 'test', { chown: { uid: 100, gid: 100 } }, err => {
      t.equal(err.message, 'ENOCHOWN', 'Chown failures propagate')
    })
    writeFileAtomic('nochown', 'test', err => {
      t.notOk(err, 'No attempt to chown when no uid/gid passed in')
    })
    writeFileAtomic('nochmod', 'test', { mode: parseInt('741', 8) }, err => {
      t.equal(err.message, 'ENOCHMOD', 'Chmod failures propagate')
    })
    writeFileAtomic('nofsyncopt', 'test', { fsync: false }, err => {
      t.notOk(err, 'fsync skipped if options.fsync is false')
    })
    writeFileAtomic('norename', 'test', err => {
      t.equal(err.message, 'ENORENAME', 'Rename errors propagate')
    })
    writeFileAtomic('norename nounlink', 'test', err => {
      t.equal(err.message, 'ENORENAME', 'Failure to unlink the temp file does not clobber the original error')
    })
    writeFileAtomic('nofsync', 'test', err => {
      t.equal(err.message, 'ENOFSYNC', 'Fsync failures propagate')
    })
    writeFileAtomic('enosys', 'test', err => {
      t.notOk(err, 'No errors on ENOSYS')
    })
    writeFileAtomic('einval', 'test', { mode: 0o741 }, err => {
      t.notOk(err, 'No errors on EINVAL for non root')
    })
    writeFileAtomic('eperm', 'test', { mode: 0o741 }, err => {
      t.notOk(err, 'No errors on EPERM for non root')
    })
    writeFileAtomic('einval', 'test', { chown: { uid: 100, gid: 100 } }, err => {
      t.notOk(err, 'No errors on EINVAL for non root')
    })
    writeFileAtomic('eperm', 'test', { chown: { uid: 100, gid: 100 } }, err => {
      t.notOk(err, 'No errors on EPERM for non root')
    })
    const optionsImmutable = {};
    writeFileAtomic('statful', 'test', optionsImmutable, err => {
      t.notOk(err);
      t.same(optionsImmutable, {});
    });
    const schedule = filePath => {
      t.equal(filePath, 'good');
      return new Promise ( resolve => {
        resolve ( () => {
          t.equal(true,true);
        });
      });
    };
    writeFileAtomic('good','test', {schedule}, err => {
      t.notOk(err);
    });
    const tmpCreate = filePath => `.${filePath}.custom`;
    const tmpCreated = filePath => t.equal(filePath, '.good.custom' );
    writeFileAtomic('good','test', {tmpCreate, tmpCreated}, err => {
      t.notOk(err)
    })
    const longPath = path.join(os.tmpdir(),'.012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt');
    const {writeFile: writeFileAtomicNative} = requireInject('./atomically.cjs', { fs });
    writeFileAtomicNative(longPath,'test', err => {
      t.notOk(err)
    })
    const pathMissingFolders = path.join(os.tmpdir(),String(Math.random()),String(Math.random()),String(Math.random()),'foo.txt');
    writeFileAtomicNative(pathMissingFolders,'test', err => {
      t.notOk(err)
    })
  })

  t.test('errors for root', t => {
    const { getuid } = process
    process.getuid = () => 0
    t.teardown(() => {
      process.getuid = getuid
    })
    const {writeFile: writeFileAtomic} = requireInject('./atomically.cjs', { fs: fsMock });
    t.plan(2)
    writeFileAtomic('einval', 'test', { chown: { uid: 100, gid: 100 } }, err => {
      t.match(err, { code: 'EINVAL' })
    })
    writeFileAtomic('einval', 'test', { mode: 0o741 }, err => {
      t.match(err, { code: 'EINVAL' })
    })
  })
})
test('unstable async tests', t => {
  t.plan(2);
  const {writeFile: writeFileAtomic} = requireInject('./atomically.cjs', { fs: fsMockUnstable });
  writeFileAtomic('good', 'test', err => {
    t.notOk(err, 'No errors occur when retryable errors are thrown')
  })
  writeFileAtomic('good', 'test', { timeout: 0 }, err => {
    t.equal(!!err.code, true, 'Retrying can be disabled')
  })
});
test('sync tests', t => {
  t.plan(2)
  closeSyncCalled = 0
  expectCloseSync = 0
  t.teardown(() => {
    t.parent.equal(closeSyncCalled, expectCloseSync, 'sync closed all files')
    expectCloseSync = 0
    closeSyncCalled = 0
  })

  const throws = function (t, shouldthrow, msg, todo) {
    let err
    try { todo() } catch (e) { err = e }
    t.equal(shouldthrow, err.message, msg)
  }
  const noexception = function (t, msg, todo) {
    let err
    try { todo() } catch (e) { err = e }
    t.error(err, msg)
  }
  let tmpfile

  t.test('non-root', t => {
    t.plan(38)
    noexception(t, 'No errors occur when passing in options', () => {
      writeFileAtomicSync('good', 'test', { mode: '0777' })
    })
    noexception(t, 'No errors occur when passing in options as string', () => {
      writeFileAtomicSync('good', 'test', 'utf8')
    })
    noexception(t, 'No errors occur when NOT passing in options', () => {
      writeFileAtomicSync('good', 'test')
    })
    noexception(t, 'fsync never called if options.fsync is falsy', () => {
      writeFileAtomicSync('good', 'test', { fsync: false })
    })
    noexception(t, 'tmpCreated is called on success', () => {
      writeFileAtomicSync('good', 'test', {
        tmpCreated (gottmpfile) {
          tmpfile = gottmpfile
        }
      })
      t.match(tmpfile, /^good\.tmp-\w+$/, 'tmpCreated called for success')
      t.match(tmpfile, /^good\.tmp-\d{10}[a-f0-9]{6}$/, 'tmpCreated format')
    })

    tmpfile = undefined
    throws(t, 'ENOOPEN', 'fs.openSync failures propagate', () => {
      writeFileAtomicSync('noopen', 'test', {
        tmpCreated (gottmpfile) {
          tmpfile = gottmpfile
        }
      })
    })
    t.equal(tmpfile, undefined, 'tmpCreated not called for open failure')

    throws(t, 'ENOWRITE', 'fs.writeSync failures propagate', () => {
      writeFileAtomicSync('nowrite', 'test', {
        tmpCreated (gottmpfile) {
          tmpfile = gottmpfile
        }
      })
    })
    t.match(tmpfile, /^nowrite\.tmp-\w+$/, 'tmpCreated called for failure after open')

    throws(t, 'ENOCHOWN', 'Chown failures propagate', () => {
      writeFileAtomicSync('nochown', 'test', { chown: { uid: 100, gid: 100 } })
    })
    noexception(t, 'No attempt to chown when false passed in', () => {
      writeFileAtomicSync('nochown', 'test', { chown: false })
    })
    noexception(t, 'No errors occurred when chown is undefined and original file owner used', () => {
      writeFileAtomicSync('chowncopy', 'test', { chown: undefined })
    })
    throws(t, 'ENORENAME', 'Rename errors propagate', () => {
      writeFileAtomicSync('norename', 'test')
    })
    throws(t, 'ENORENAME', 'Failure to unlink the temp file does not clobber the original error', () => {
      writeFileAtomicSync('norename nounlink', 'test')
    })
    throws(t, 'ENOFSYNC', 'Fsync errors propagate', () => {
      writeFileAtomicSync('nofsync', 'test')
    })
    noexception(t, 'No errors on ENOSYS', () => {
      writeFileAtomicSync('enosys', 'test', { chown: { uid: 100, gid: 100 } })
    })
    noexception(t, 'No errors on EINVAL for non root', () => {
      writeFileAtomicSync('einval', 'test', { chown: { uid: 100, gid: 100 } })
    })
    noexception(t, 'No errors on EPERM for non root', () => {
      writeFileAtomicSync('eperm', 'test', { chown: { uid: 100, gid: 100 } })
    })

    throws(t, 'ENOCHMOD', 'Chmod failures propagate', () => {
      writeFileAtomicSync('nochmod', 'test', { mode: 0o741 })
    })
    noexception(t, 'No errors on EPERM for non root', () => {
      writeFileAtomicSync('eperm', 'test', { mode: 0o741 })
    })
    noexception(t, 'No attempt to chmod when no mode provided', () => {
      writeFileAtomicSync('nochmod', 'test', { mode: false })
    })
    const optionsImmutable = {};
    noexception(t, 'options are immutable', () => {
      writeFileAtomicSync('statful', 'test', optionsImmutable)
    })
    t.same(optionsImmutable, {});
    const tmpCreate = filePath => `.${filePath}.custom`;
    const tmpCreated = filePath => t.equal(filePath, '.good.custom' );
    noexception(t, 'custom temp creator', () => {
      writeFileAtomicSync('good', 'test', {tmpCreate, tmpCreated})
    })
    const path0 = path.join(os.tmpdir(),'atomically-test-0');
    const tmpPath0 = path0 + '.temp';
    noexception(t, 'temp files are purged on success', () => {
      const {writeFileSync: writeFileAtomicSync} = requireInject('./atomically.cjs', { fs });
      writeFileAtomicSync(path0, 'test', {tmpCreate: () => tmpPath0})
    })
    t.equal(true,fs.existsSync(path0));
    t.equal(false,fs.existsSync(tmpPath0));
    const path1 = path.join(os.tmpdir(),'atomically-test-norename-1');
    const tmpPath1 = path1 + '.temp';
    throws(t, 'ENORENAME', 'temp files are purged on error', () => {
      const {writeFileSync: writeFileAtomicSync} = requireInject('./atomically.cjs', { fs: Object.assign ( {}, fs, { renameSync: fsMock.renameSync })});
      writeFileAtomicSync(path1, 'test', {tmpCreate: () => tmpPath1})
    })
    t.equal(false,fs.existsSync(path1));
    t.equal(false,fs.existsSync(tmpPath1));
    const path2 = path.join(os.tmpdir(),'atomically-test-norename-2');
    const tmpPath2 = path2 + '.temp';
    throws(t, 'ENORENAME', 'temp files can also not be purged on error', () => {
      const {writeFileSync: writeFileAtomicSync} = requireInject('./atomically.cjs', { fs: Object.assign ( {}, fs, { renameSync: fsMock.renameSync })});
      writeFileAtomicSync(path2, 'test', {tmpCreate: () => tmpPath2,tmpPurge: false})
    })
    t.equal(false,fs.existsSync(path2));
    t.equal(true,fs.existsSync(tmpPath2));
    const longPath = path.join(os.tmpdir(),'.012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789.txt');
    noexception(t, 'temp files are truncated', () => {
      const {writeFileSync: writeFileAtomicSync} = requireInject('./atomically.cjs', { fs });
      writeFileAtomicSync(longPath, 'test')
    })
    const pathMissingFolders = path.join(os.tmpdir(),String(Math.random()),String(Math.random()),String(Math.random()),'foo.txt');
    noexception(t, 'parent folders are created', () => {
      const {writeFileSync: writeFileAtomicSync} = requireInject('./atomically.cjs', { fs });
      writeFileAtomicSync(pathMissingFolders, 'test')
    })
  })

  t.test('errors for root', t => {
    const { getuid } = process
    process.getuid = () => 0
    t.teardown(() => {
      process.getuid = getuid
    })
    const {writeFileSync: writeFileAtomicSync} = requireInject('./atomically.cjs', { fs: fsMock });
    t.plan(2)
    throws(t, 'EINVAL', 'Chown error as root user', () => {
      writeFileAtomicSync('einval', 'test', { chown: { uid: 100, gid: 100 } })
    })
    throws(t, 'EINVAL', 'Chmod error as root user', () => {
      writeFileAtomicSync('einval', 'test', { mode: 0o741 })
    })
  })
})
test('unstable sync tests', t => {
  t.plan(2);

  const throws = function (t, msg, todo) {
    let err
    try { todo() } catch (e) { err = e }
    t.equal(!!err.code, true, msg)
  }

  const noexception = function (t, msg, todo) {
    let err
    try { todo() } catch (e) { err = e }
    t.error(err, msg)
  }

  noexception(t, 'No errors occur when retryable errors are thrown', () => {
    const {writeFileSync: writeFileAtomicSync} = requireInject('./atomically.cjs', { fs: fsMockUnstable });
    writeFileAtomicSync('good', 'test')
  })

  throws(t, 'retrying can be disabled', () => {
    const {writeFileSync: writeFileAtomicSync} = requireInject('./atomically.cjs', { fs: fsMockUnstable });
    writeFileAtomicSync('good', 'test', { timeout: 0 })
  })
});
test('promises', async t => {
  let tmpfile
  closeCalled = 0
  expectClose = 0
  t.teardown(() => {
    t.parent.equal(closeCalled, expectClose, 'promises closed all files')
    closeCalled = 0
    expectClose = 0
  })

  await writeFileAtomic('good', 'test', {
    tmpCreated (gottmpfile) {
      tmpfile = gottmpfile
    }
  })
  t.match(tmpfile, /^good\.tmp-\w+$/, 'tmpCreated is called for success')

  await writeFileAtomic('good', 'test', {
    tmpCreated (gottmpfile) {
      return Promise.resolve()
    }
  })

  tmpfile = undefined
  await t.rejects(writeFileAtomic('noopen', 'test', {
    tmpCreated (gottmpfile) {
      tmpfile = gottmpfile
    }
  }))
  t.equal(tmpfile, undefined, 'tmpCreated is not called on open failure')

  await t.rejects(writeFileAtomic('nowrite', 'test', {
    tmpCreated (gottmpfile) {
      tmpfile = gottmpfile
    }
  }))
  t.match(tmpfile, /^nowrite\.tmp-\w+$/, 'tmpCreated is called if failure is after open')
})
151
node_modules/atomically/test/concurrency.cjs
generated
vendored
Normal file
@@ -0,0 +1,151 @@
process.setMaxListeners(1000000);

const fs = require('fs')
const {test} = require('tap')
const requireInject = require('require-inject')

// defining mock for fs so its functions can be modified
const fsMock = Object.assign ( {}, fs, {
  /* ASYNC */
  mkdir (filename, opts, cb) {
    return cb(null);
  },
  realpath (filename, cb) {
    return cb(null, filename)
  },
  open (tmpfile, options, mode, cb) {
    if (/noopen/.test(tmpfile)) return cb(new Error('ENOOPEN'))
    cb(null, tmpfile)
  },
  write (fd) {
    const cb = arguments[arguments.length - 1]
    if (/nowrite/.test(fd)) return cb(new Error('ENOWRITE'))
    cb()
  },
  fsync (fd, cb) {
    if (/nofsync/.test(fd)) return cb(new Error('ENOFSYNC'))
    cb()
  },
  close (fd, cb) {
    cb()
  },
  chown (tmpfile, uid, gid, cb) {
    if (/nochown/.test(tmpfile)) return cb(new Error('ENOCHOWN'))
    cb()
  },
  chmod (tmpfile, mode, cb) {
    if (/nochmod/.test(tmpfile)) return cb(new Error('ENOCHMOD'))
    cb()
  },
  rename (tmpfile, filename, cb) {
    if (/norename/.test(tmpfile)) return cb(new Error('ENORENAME'))
    cb()
  },
  unlink (tmpfile, cb) {
    if (/nounlink/.test(tmpfile)) return cb(new Error('ENOUNLINK'))
    cb()
  },
  stat (tmpfile, cb) {
    if (/nostat/.test(tmpfile)) return cb(new Error('ENOSTAT'))
    cb()
  },
  /* SYNC */
  mkdirSync (filename) {},
  realpathSync (filename, cb) {
    return filename
  },
  openSync (tmpfile, options) {
    if (/noopen/.test(tmpfile)) throw new Error('ENOOPEN')
    return tmpfile
  },
  writeSync (fd) {
    if (/nowrite/.test(fd)) throw new Error('ENOWRITE')
  },
  fsyncSync (fd) {
    if (/nofsync/.test(fd)) throw new Error('ENOFSYNC')
  },
  closeSync () {},
  chownSync (tmpfile, uid, gid) {
    if (/nochown/.test(tmpfile)) throw new Error('ENOCHOWN')
  },
  chmodSync (tmpfile, mode) {
    if (/nochmod/.test(tmpfile)) throw new Error('ENOCHMOD')
  },
  renameSync (tmpfile, filename) {
    if (/norename/.test(tmpfile)) throw new Error('ENORENAME')
  },
  unlinkSync (tmpfile) {
    if (/nounlink/.test(tmpfile)) throw new Error('ENOUNLINK')
  },
  statSync (tmpfile) {
    if (/nostat/.test(tmpfile)) throw new Error('ENOSTAT')
  }
})

const {writeFile: writeFileAtomic} = requireInject('./atomically.cjs', { fs: fsMock });

// preserve original functions
const oldRealPath = fsMock.realpath
const oldRename = fsMock.rename
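// realpath is patched per test to mark a file as "in use" and rename to mark it
// released, so the assertions below can detect overlapping writes to the same path.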
test('ensure writes to the same file are serial', t => {
  let fileInUse = false
  const ops = 5 // count for how many concurrent write ops to request
  t.plan(ops * 3 + 3)
  fsMock.realpath = (...args) => {
    t.notOk(fileInUse, 'file not in use')
    fileInUse = true
    oldRealPath(...args)
  }
  fsMock.rename = (...args) => {
    t.ok(fileInUse, 'file in use')
    fileInUse = false
    oldRename(...args)
  }
  const {writeFile: writeFileAtomic} = requireInject('./atomically.cjs', { fs: fsMock });
  for (let i = 0; i < ops; i++) {
    writeFileAtomic('test', 'test', err => {
      if (err) t.fail(err)
      else t.pass('wrote without error')
    })
  }
  setTimeout(() => {
    writeFileAtomic('test', 'test', err => {
      if (err) t.fail(err)
      else t.pass('successive writes after delay')
    })
  }, 500)
})
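// Same tracking idea across two paths: writes to different files may interleave
// (at least one parallel overlap is expected), while writes to the same file stay serial.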
test('allow write to multiple files in parallel, but same file writes are serial', t => {
  const filesInUse = []
  const ops = 5
  let wasParallel = false
  fsMock.realpath = (filename, ...args) => {
    filesInUse.push(filename)
    const firstOccurence = filesInUse.indexOf(filename)
    t.equal(filesInUse.indexOf(filename, firstOccurence + 1), -1, 'serial writes') // check for another occurrence after the first
    if (filesInUse.length > 1) wasParallel = true // remember that a parallel operation took place
    oldRealPath(filename, ...args)
  }
  fsMock.rename = (filename, ...args) => {
    filesInUse.splice(filesInUse.indexOf(filename), 1)
    oldRename(filename, ...args)
  }
  const {writeFile: writeFileAtomic} = requireInject('./atomically.cjs', { fs: fsMock });
  t.plan(ops * 2 * 2 + 1)
  let opCount = 0
  for (let i = 0; i < ops; i++) {
    writeFileAtomic('test', 'test', err => {
      if (err) t.fail(err, 'wrote without error')
      else t.pass('wrote without error')
    })
    writeFileAtomic('test2', 'test', err => {
      opCount++
      if (opCount === ops) t.ok(wasParallel, 'parallel writes')

      if (err) t.fail(err, 'wrote without error')
      else t.pass('wrote without error')
    })
  }
})
289
node_modules/atomically/test/integration.cjs
generated
vendored
Normal file
@@ -0,0 +1,289 @@
process.setMaxListeners(1000000);

const fs = require('fs')
const path = require('path')
const {test} = require('tap')
const rimraf = require('rimraf')
const requireInject = require('require-inject')

const workdir = path.join(__dirname, path.basename(__filename, '.cjs'))
let testfiles = 0
function tmpFile () {
  return path.join(workdir, 'test-' + (++testfiles))
}

function readFile (path) {
  return fs.readFileSync(path).toString()
}
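// Helpers that run writeFile/writeFileSync against the real fs, but with chown/stat
// swapped out so the uid/gid that would be applied is captured instead of requiring root.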
function didWriteFileAtomic (t, expected, filename, data, options, callback) {
  if (options instanceof Function) {
    callback = options
    options = null
  }
  if (!options) options = {}
  const actual = {}
  const {writeFile: writeFileAtomic} = requireInject('./atomically.cjs', {
    fs: Object.assign({}, fs, {
      chown (filename, uid, gid, cb) {
        actual.uid = uid
        actual.gid = gid
        process.nextTick(cb)
      },
      stat (filename, cb) {
        fs.stat(filename, (err, stats) => {
          if (err) return cb(err)
          cb(null, Object.assign(stats, expected || {}))
        })
      }
    })
  })
  return writeFileAtomic(filename, data, options, err => {
    t.ok(true); // t.strictSame(actual, expected, 'ownership is as expected') //TODO: Turned off as it's implemented unreliably, preventing us from doing a safe optimization
    callback(err)
  })
}

function didWriteFileAtomicSync (t, expected, filename, data, options) {
  const actual = {}
  const {writeFileSync} = requireInject('./atomically.cjs', {
    fs: Object.assign({}, fs, {
      chownSync (filename, uid, gid) {
        actual.uid = uid
        actual.gid = gid
      },
      statSync (filename) {
        const stats = fs.statSync(filename)
        return Object.assign(stats, expected || {})
      }
    })
  })
  writeFileSync(filename, data, options)
  t.ok(true); // t.strictSame(actual, expected) //TODO: Turned off as it's implemented unreliably, preventing us from doing a safe optimization
}

function currentUser () {
  return {
    uid: process.getuid(),
    gid: process.getgid()
  }
}

test('setup', t => {
  rimraf.sync(workdir)
  fs.mkdirSync(workdir, {recursive: true})
  t.end()
})

test('writes simple file (async)', t => {
  t.plan(3)
  const file = tmpFile()
  didWriteFileAtomic(t, {}, file, '42', err => {
    t.error(err, 'no error')
    t.equal(readFile(file), '42', 'content ok')
  })
})

test('writes simple file with encoding (async)', t => {
  t.plan(3)
  const file = tmpFile()
  didWriteFileAtomic(t, {}, file, 'foo', 'utf16le', err => {
    t.error(err, 'no error')
    t.equal(readFile(file), 'f\u0000o\u0000o\u0000', 'content ok')
  })
})

test('writes buffers to simple file (async)', t => {
  t.plan(3)
  const file = tmpFile()
  didWriteFileAtomic(t, {}, file, Buffer.from('42'), err => {
    t.error(err, 'no error')
    t.equal(readFile(file), '42', 'content ok')
  })
})

test('writes undefined to simple file (async)', t => {
  t.plan(3)
  const file = tmpFile()
  didWriteFileAtomic(t, {}, file, undefined, err => {
    t.error(err, 'no error')
    t.equal(readFile(file), '', 'content ok')
  })
})

test('writes to symlinks without clobbering (async)', t => {
  t.plan(5)
  const file = tmpFile()
  const link = tmpFile()
  fs.writeFileSync(file, '42')
  fs.symlinkSync(file, link)
  didWriteFileAtomic(t, currentUser(), link, '43', err => {
    t.error(err, 'no error')
    t.equal(readFile(file), '43', 'target content ok')
    t.equal(readFile(link), '43', 'link content ok')
    t.ok(fs.lstatSync(link).isSymbolicLink(), 'link is link')
  })
})

test('runs chown on given file (async)', t => {
  const file = tmpFile()
  didWriteFileAtomic(t, { uid: 42, gid: 43 }, file, '42', { chown: { uid: 42, gid: 43 } }, err => {
    t.error(err, 'no error')
    t.equal(readFile(file), '42', 'content ok')
    t.end()
  })
})

test('writes simple file with no chown (async)', t => {
  t.plan(3)
  const file = tmpFile()
  didWriteFileAtomic(t, {}, file, '42', { chown: false }, err => {
    t.error(err, 'no error')
    t.equal(readFile(file), '42', 'content ok')
    t.end()
  })
})

test('runs chmod on given file (async)', t => {
  t.plan(5)
  const file = tmpFile()
  didWriteFileAtomic(t, {}, file, '42', { mode: parseInt('741', 8) }, err => {
    t.error(err, 'no error')
    const stat = fs.statSync(file)
    t.equal(stat.mode, parseInt('100741', 8))
    didWriteFileAtomic(t, { uid: 42, gid: 43 }, file, '23', { chown: { uid: 42, gid: 43 } }, err => {
      t.error(err, 'no error')
    })
  })
})

test('run chmod AND chown (async)', t => {
  t.plan(3)
  const file = tmpFile()
  didWriteFileAtomic(t, { uid: 42, gid: 43 }, file, '42', { mode: parseInt('741', 8), chown: { uid: 42, gid: 43 } }, err => {
    t.error(err, 'no error')
    const stat = fs.statSync(file)
    t.equal(stat.mode, parseInt('100741', 8))
  })
})

test('does not change chmod by default (async)', t => {
  t.plan(5)
  const file = tmpFile()
  didWriteFileAtomic(t, {}, file, '42', { mode: parseInt('741', 8) }, err => {
    t.error(err, 'no error')

    didWriteFileAtomic(t, currentUser(), file, '43', err => {
      t.error(err, 'no error')
      const stat = fs.statSync(file)
      t.equal(stat.mode, parseInt('100741', 8))
    })
  })
})

test('does not change chown by default (async)', t => {
  t.plan(6)
  const file = tmpFile()
  didWriteFileAtomic(t, { uid: 42, gid: 43 }, file, '42', { chown: { uid: 42, gid: 43 } }, _setModeOnly)

  function _setModeOnly (err) {
    t.error(err, 'no error')

    didWriteFileAtomic(t, { uid: 42, gid: 43 }, file, '43', { mode: parseInt('741', 8) }, _allDefault)
  }

  function _allDefault (err) {
    t.error(err, 'no error')

    didWriteFileAtomic(t, { uid: 42, gid: 43 }, file, '43', _noError)
  }

  function _noError (err) {
    t.error(err, 'no error')
  }
})

test('writes simple file (sync)', t => {
  t.plan(2)
  const file = tmpFile()
  didWriteFileAtomicSync(t, {}, file, '42')
  t.equal(readFile(file), '42')
})

test('writes simple file with encoding (sync)', t => {
  t.plan(2)
  const file = tmpFile()
  didWriteFileAtomicSync(t, {}, file, 'foo', 'utf16le')
  t.equal(readFile(file), 'f\u0000o\u0000o\u0000')
})

test('writes simple buffer file (sync)', t => {
  t.plan(2)
  const file = tmpFile()
  didWriteFileAtomicSync(t, {}, file, Buffer.from('42'))
  t.equal(readFile(file), '42')
})

test('writes undefined file (sync)', t => {
  t.plan(2)
  const file = tmpFile()
  didWriteFileAtomicSync(t, {}, file, undefined)
  t.equal(readFile(file), '')
})

test('writes to symlinks without clobbering (sync)', t => {
  t.plan(4)
  const file = tmpFile()
  const link = tmpFile()
  fs.writeFileSync(file, '42')
  fs.symlinkSync(file, link)
  didWriteFileAtomicSync(t, currentUser(), link, '43')
  t.equal(readFile(file), '43', 'target content ok')
  t.equal(readFile(link), '43', 'link content ok')
  t.ok(fs.lstatSync(link).isSymbolicLink(), 'link is link')
})

test('runs chown on given file (sync)', t => {
  t.plan(1)
  const file = tmpFile()
  didWriteFileAtomicSync(t, { uid: 42, gid: 43 }, file, '42', { chown: { uid: 42, gid: 43 } })
})

test('runs chmod on given file (sync)', t => {
  t.plan(3)
  const file = tmpFile()
  didWriteFileAtomicSync(t, {}, file, '42', { mode: parseInt('741', 8) })
  const stat = fs.statSync(file)
  t.equal(stat.mode, parseInt('100741', 8))
  didWriteFileAtomicSync(t, { uid: 42, gid: 43 }, file, '23', { chown: { uid: 42, gid: 43 } })
})

test('runs chown and chmod (sync)', t => {
  t.plan(2)
  const file = tmpFile()
  didWriteFileAtomicSync(t, { uid: 42, gid: 43 }, file, '42', { mode: parseInt('741', 8), chown: { uid: 42, gid: 43 } })
  const stat = fs.statSync(file)
  t.equal(stat.mode, parseInt('100741', 8))
})

test('does not change chmod by default (sync)', t => {
  t.plan(3)
  const file = tmpFile()
  didWriteFileAtomicSync(t, {}, file, '42', { mode: parseInt('741', 8) })
  didWriteFileAtomicSync(t, currentUser(), file, '43')
  const stat = fs.statSync(file)
  t.equal(stat.mode, parseInt('100741', 8))
})

test('does not change chown by default (sync)', t => {
  t.plan(3)
  const file = tmpFile()
  didWriteFileAtomicSync(t, { uid: 42, gid: 43 }, file, '42', { chown: { uid: 42, gid: 43 } })
  didWriteFileAtomicSync(t, { uid: 42, gid: 43 }, file, '43', { mode: parseInt('741', 8) })
  didWriteFileAtomicSync(t, { uid: 42, gid: 43 }, file, '44')
})

test('cleanup', t => {
  rimraf.sync(workdir)
  t.end()
})
3
node_modules/atomically/tsconfig.json
generated
vendored
Normal file
@@ -0,0 +1,3 @@
{
  "extends": "tsex/tsconfig.json"
}