commit ccf2b60654: initial add
25 changed files with 4997 additions and 0 deletions

.gitignore (vendored, Normal file): 1 line
@@ -0,0 +1 @@
acw

acwatcher.js (Normal file): 84 lines
@@ -0,0 +1,84 @@
#!/usr/bin/env bun

const chokidar = require('chokidar');
const { execSync } = require('child_process');
const path = process.cwd();
let lastActivity = Date.now();
const TIMEOUT = 600000; // 10 minutes
const AMEND_COMMITS = true; // Amend auto-commits
let isProcessing = false; // Prevent recursive calls
const forceNewCommit = process.argv.includes('--new-commit'); // Check for --new-commit flag

// Check if in a Git repo
try {
  execSync('git rev-parse --is-inside-work-tree', { cwd: path, stdio: 'ignore' });
  console.log(`Starting auto-commit in ${path}`);
} catch (e) {
  console.error('Error: Not a Git repository. Please run in a Git repo.');
  process.exit(1);
}

// Function to check and commit changes
function commitChanges() {
  if (isProcessing) {
    console.log('Already processing, skipping...');
    return;
  }
  isProcessing = true;

  try {
    const status = execSync('git status --porcelain', { cwd: path }).toString();
    if (status.trim()) {
      console.log('Changes detected:', status.trim());
      execSync('git add .', { cwd: path });
      if (forceNewCommit) {
        execSync(`git commit -m "Auto-commit: ${new Date().toISOString()}"`, { cwd: path });
        console.log('Created new commit (forced).');
      } else {
        const lastCommit = execSync('git log -1 --oneline', { cwd: path }).toString();
        if (AMEND_COMMITS && lastCommit.includes('Auto-commit')) {
          execSync('git commit --amend --no-edit', { cwd: path });
          console.log('Amended previous auto-commit.');
        } else {
          execSync(`git commit -m "Auto-commit: ${new Date().toISOString()}"`, { cwd: path });
          console.log('Created new commit.');
        }
      }
    } else {
      console.log('No changes to commit.');
    }
    lastActivity = Date.now();
  } catch (e) {
    console.error(`Commit error: ${e.message}`);
  } finally {
    isProcessing = false;
  }
}

// Run once and exit if --new-commit is provided
if (forceNewCommit) {
  commitChanges();
  process.exit(0);
}

// Watch files, ignoring .git
chokidar.watch(path, {
  ignoreInitial: false,
  ignored: [/\.git\//, /\.git$/], // Exclude the .git directory and its files (dots escaped)
  awaitWriteFinish: { stabilityThreshold: 200, pollInterval: 100 }
})
  .on('all', (event, filePath) => {
    console.log(`Event: ${event} on ${filePath}`);
    commitChanges();
  })
  .on('error', (error) => {
    console.error(`Watcher error: ${error.message}`);
  });

// Timeout check
setInterval(() => {
  if (Date.now() - lastActivity >= TIMEOUT) {
    console.log('No activity for 10 minutes, exiting.');
    process.exit(0);
  }
}, 1000);
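
For context, here is a minimal sketch of how the watcher above might be driven from another script. Only `acwatcher.js` and its `--new-commit` flag come from this commit; the launcher itself, the repository path, and the use of `child_process` are illustrative assumptions.

```javascript
// Hypothetical launcher, not part of this commit.
// Starts the watcher in a target repo, later forces one fresh commit, then stops it.
const { spawn, execFileSync } = require('child_process');

const repo = '/path/to/some/repo'; // assumption: any Git working tree

// Long-running mode: amends successive "Auto-commit" commits as files change.
const watcher = spawn('bun', ['acwatcher.js'], { cwd: repo, stdio: 'inherit' });

// One-shot mode: --new-commit stages everything and always creates a new commit.
setTimeout(() => {
  execFileSync('bun', ['acwatcher.js', '--new-commit'], { cwd: repo, stdio: 'inherit' });
  watcher.kill(); // otherwise it exits on its own after 10 idle minutes
}, 60_000);
```
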
acwatcher.js.old (Normal file): 72 lines
@@ -0,0 +1,72 @@
#!/usr/bin/env bun

const chokidar = require('chokidar');
const { execSync } = require('child_process');
const path = process.cwd();
let lastActivity = Date.now();
const TIMEOUT = 600000; // 10 minutes
const AMEND_COMMITS = true; // Amend auto-commits
let isProcessing = false; // Prevent recursive calls

// Check if in a Git repo
try {
  execSync('git rev-parse --is-inside-work-tree', { cwd: path, stdio: 'ignore' });
  console.log(`Starting auto-commit in ${path}`);
} catch (e) {
  console.error('Error: Not a Git repository. Please run in a Git repo.');
  process.exit(1);
}

// Function to check and commit changes
function commitChanges() {
  if (isProcessing) {
    console.log('Already processing, skipping...');
    return;
  }
  isProcessing = true;

  try {
    const status = execSync('git status --porcelain', { cwd: path }).toString();
    if (status.trim()) {
      console.log('Changes detected:', status.trim());
      execSync('git add .', { cwd: path });
      const lastCommit = execSync('git log -1 --oneline', { cwd: path }).toString();
      if (AMEND_COMMITS && lastCommit.includes('Auto-commit')) {
        execSync('git commit --amend --no-edit', { cwd: path });
        console.log('Amended previous auto-commit.');
      } else {
        execSync(`git commit -m "Auto-commit: ${new Date().toISOString()}"`, { cwd: path });
        console.log('Created new commit.');
      }
    } else {
      console.log('No changes to commit.');
    }
    lastActivity = Date.now();
  } catch (e) {
    console.error(`Commit error: ${e.message}`);
  } finally {
    isProcessing = false;
  }
}

// Watch files, ignoring .git
chokidar.watch(path, {
  ignoreInitial: false,
  ignored: [/.git\//, /.git$/], // Exclude .git directory and files
  awaitWriteFinish: { stabilityThreshold: 200, pollInterval: 100 }
})
  .on('all', (event, filePath) => {
    console.log(`Event: ${event} on ${filePath}`);
    commitChanges();
  })
  .on('error', (error) => {
    console.error(`Watcher error: ${error.message}`);
  });

// Timeout check
setInterval(() => {
  if (Date.now() - lastActivity >= TIMEOUT) {
    console.log('No activity for 10 minutes, exiting.');
    process.exit(0);
  }
}, 1000);

bun.lock (Normal file): 15 lines
@@ -0,0 +1,15 @@
{
  "lockfileVersion": 1,
  "workspaces": {
    "": {
      "dependencies": {
        "chokidar": "^4.0.3",
      },
    },
  },
  "packages": {
    "chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="],

    "readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
  }
}

node_modules/chokidar/LICENSE (generated, vendored, Normal file): 21 lines
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2012 Paul Miller (https://paulmillr.com), Elan Shanker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the “Software”), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

node_modules/chokidar/README.md (generated, vendored, Normal file): 305 lines
@@ -0,0 +1,305 @@
# Chokidar [](https://github.com/paulmillr/chokidar)

> Minimal and efficient cross-platform file watching library

## Why?

There are many reasons to prefer Chokidar to raw fs.watch / fs.watchFile in 2024:

- Events are properly reported
  - macOS events report filenames
  - events are not reported twice
  - changes are reported as add / change / unlink instead of useless `rename`
- Atomic writes are supported, using `atomic` option
  - Some file editors use them
- Chunked writes are supported, using `awaitWriteFinish` option
  - Large files are commonly written in chunks
- File / dir filtering is supported
- Symbolic links are supported
- Recursive watching is always supported, instead of partial when using raw events
  - Includes a way to limit recursion depth

Chokidar relies on the Node.js core `fs` module, but when using
`fs.watch` and `fs.watchFile` for watching, it normalizes the events it
receives, often checking for truth by getting file stats and/or dir contents.
The `fs.watch`-based implementation is the default, which
avoids polling and keeps CPU usage down. Be advised that chokidar will initiate
watchers recursively for everything within scope of the paths that have been
specified, so be judicious about not wasting system resources by watching much
more than needed. For some cases, `fs.watchFile`, which utilizes polling and uses more resources, is used.

Made for [Brunch](https://brunch.io/) in 2012,
it is now used in [~30 million repositories](https://www.npmjs.com/browse/depended/chokidar) and
has proven itself in production environments.

**Sep 2024 update:** v4 is out! It decreases dependency count from 13 to 1, removes
support for globs, adds support for ESM / CommonJS modules, and bumps minimum node.js version from v8 to v14.
Check out [upgrading](#upgrading).

## Getting started

Install with npm:

```sh
npm install chokidar
```

Use it in your code:

```javascript
import chokidar from 'chokidar';

// One-liner for current directory
chokidar.watch('.').on('all', (event, path) => {
  console.log(event, path);
});


// Extended options
// ----------------

// Initialize watcher.
const watcher = chokidar.watch('file, dir, or array', {
  ignored: (path, stats) => stats?.isFile() && !path.endsWith('.js'), // only watch js files
  persistent: true
});

// Something to use when events are received.
const log = console.log.bind(console);
// Add event listeners.
watcher
  .on('add', path => log(`File ${path} has been added`))
  .on('change', path => log(`File ${path} has been changed`))
  .on('unlink', path => log(`File ${path} has been removed`));

// More possible events.
watcher
  .on('addDir', path => log(`Directory ${path} has been added`))
  .on('unlinkDir', path => log(`Directory ${path} has been removed`))
  .on('error', error => log(`Watcher error: ${error}`))
  .on('ready', () => log('Initial scan complete. Ready for changes'))
  .on('raw', (event, path, details) => { // internal
    log('Raw event info:', event, path, details);
  });

// 'add', 'addDir' and 'change' events also receive stat() results as second
// argument when available: https://nodejs.org/api/fs.html#fs_class_fs_stats
watcher.on('change', (path, stats) => {
  if (stats) console.log(`File ${path} changed size to ${stats.size}`);
});

// Watch new files.
watcher.add('new-file');
watcher.add(['new-file-2', 'new-file-3']);

// Get list of actual paths being watched on the filesystem
let watchedPaths = watcher.getWatched();

// Un-watch some files.
await watcher.unwatch('new-file');

// Stop watching. The method is async!
await watcher.close().then(() => console.log('closed'));

// Full list of options. See below for descriptions.
// Do not use this example!
chokidar.watch('file', {
  persistent: true,

  // ignore .txt files
  ignored: (file) => file.endsWith('.txt'),
  // watch only .txt files
  // ignored: (file, _stats) => _stats?.isFile() && !file.endsWith('.txt'),

  awaitWriteFinish: true, // emit single event when chunked writes are completed
  atomic: true, // emit proper events when "atomic writes" (mv _tmp file) are used

  // The options also allow specifying custom intervals in ms
  // awaitWriteFinish: {
  //   stabilityThreshold: 2000,
  //   pollInterval: 100
  // },
  // atomic: 100,

  interval: 100,
  binaryInterval: 300,

  cwd: '.',
  depth: 99,

  followSymlinks: true,
  ignoreInitial: false,
  ignorePermissionErrors: false,
  usePolling: false,
  alwaysStat: false,
});

```

`chokidar.watch(paths, [options])`

* `paths` (string or array of strings). Paths to files, dirs to be watched
  recursively.
* `options` (object) Options object as defined below:

#### Persistence

* `persistent` (default: `true`). Indicates whether the process
  should continue to run as long as files are being watched.

#### Path filtering

* `ignored` function, regex, or path. Defines files/paths to be ignored.
  The whole relative or absolute path is tested, not just filename. If a function with two arguments
  is provided, it gets called twice per path - once with a single argument (the path), second
  time with two arguments (the path and the
  [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)
  object of that path).
* `ignoreInitial` (default: `false`). If set to `false` then `add`/`addDir` events are also emitted for matching paths while
  instantiating the watching as chokidar discovers these file paths (before the `ready` event).
* `followSymlinks` (default: `true`). When `false`, only the
  symlinks themselves will be watched for changes instead of following
  the link references and bubbling events through the link's path.
* `cwd` (no default). The base directory from which watch `paths` are to be
  derived. Paths emitted with events will be relative to this.
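
A small illustrative sketch combining the path-filtering options above (the directory names and the filter rule are assumptions, not part of chokidar's docs):

```javascript
import chokidar from 'chokidar';

// Watch ./src, report paths relative to it, skip a fixtures dir and .log files.
const watcher = chokidar.watch('.', {
  cwd: './src',
  ignoreInitial: true,    // don't emit add/addDir for files found in the initial scan
  followSymlinks: false,  // watch symlinks themselves rather than their targets
  ignored: (path, stats) =>
    path.includes('fixtures') || (stats?.isFile() && path.endsWith('.log')),
});

watcher.on('all', (event, relativePath) => console.log(event, relativePath));
```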

#### Performance

* `usePolling` (default: `false`).
  Whether to use fs.watchFile (backed by polling), or fs.watch. If polling
  leads to high CPU utilization, consider setting this to `false`. It is
  typically necessary to **set this to `true` to successfully watch files over
  a network**, and it may be necessary to successfully watch files in other
  non-standard situations. Setting to `true` explicitly on MacOS overrides the
  `useFsEvents` default. You may also set the CHOKIDAR_USEPOLLING env variable
  to true (1) or false (0) in order to override this option.
* _Polling-specific settings_ (effective when `usePolling: true`)
  * `interval` (default: `100`). Interval of file system polling, in milliseconds. You may also
    set the CHOKIDAR_INTERVAL env variable to override this option.
  * `binaryInterval` (default: `300`). Interval of file system
    polling for binary files.
    ([see list of binary extensions](https://github.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json))
* `alwaysStat` (default: `false`). If relying upon the
  [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)
  object that may get passed with `add`, `addDir`, and `change` events, set
  this to `true` to ensure it is provided even in cases where it wasn't
  already available from the underlying watch events.
* `depth` (default: `undefined`). If set, limits how many levels of
  subdirectories will be traversed.
* `awaitWriteFinish` (default: `false`).
  By default, the `add` event will fire when a file first appears on disk, before
  the entire file has been written. Furthermore, in some cases some `change`
  events will be emitted while the file is being written. In some cases,
  especially when watching for large files there will be a need to wait for the
  write operation to finish before responding to a file creation or modification.
  Setting `awaitWriteFinish` to `true` (or a truthy value) will poll file size,
  holding its `add` and `change` events until the size does not change for a
  configurable amount of time. The appropriate duration setting is heavily
  dependent on the OS and hardware. For accurate detection this parameter should
  be relatively high, making file watching much less responsive.
  Use with caution.
  * *`options.awaitWriteFinish` can be set to an object in order to adjust
    timing params:*
  * `awaitWriteFinish.stabilityThreshold` (default: 2000). Amount of time in
    milliseconds for a file size to remain constant before emitting its event.
  * `awaitWriteFinish.pollInterval` (default: 100). File size polling interval, in milliseconds.
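
As a concrete illustration of the performance options above (the thresholds and paths are arbitrary examples, not recommendations):

```javascript
import chokidar from 'chokidar';

const watcher = chokidar.watch('./uploads', {
  // Hold add/change until the file size has been stable for 1 s, checking every 250 ms;
  // useful for large files written in chunks.
  awaitWriteFinish: { stabilityThreshold: 1000, pollInterval: 250 },

  // Opt into stat polling (heavier, but works over network shares).
  usePolling: true,
  interval: 200,
  binaryInterval: 500,

  depth: 3, // don't descend more than three directory levels
});

watcher.on('add', (path, stats) => {
  if (stats) console.log(`finished writing ${path} (${stats.size} bytes)`);
});
```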

#### Errors

* `ignorePermissionErrors` (default: `false`). Indicates whether to watch files
  that don't have read permissions if possible. If watching fails due to `EPERM`
  or `EACCES` with this set to `true`, the errors will be suppressed silently.
* `atomic` (default: `true` if `useFsEvents` and `usePolling` are `false`).
  Automatically filters out artifacts that occur when using editors that use
  "atomic writes" instead of writing directly to the source file. If a file is
  re-added within 100 ms of being deleted, Chokidar emits a `change` event
  rather than `unlink` then `add`. If the default of 100 ms does not work well
  for you, you can override it by setting `atomic` to a custom value, in
  milliseconds.
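
To make the `atomic` behavior concrete (the path and the 250 ms window are illustrative assumptions):

```javascript
import chokidar from 'chokidar';

// Some editors write to a temp file and rename it over the original ("atomic write").
// With atomic set, a delete followed by a re-add within the window below is collapsed
// into a single 'change' event instead of 'unlink' + 'add'.
const watcher = chokidar.watch('./notes', { atomic: 250 }); // window in milliseconds
watcher.on('change', (path) => console.log('changed:', path));
```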

### Methods & Events

`chokidar.watch()` produces an instance of `FSWatcher`. Methods of `FSWatcher`:

* `.add(path / paths)`: Add files, directories for tracking.
  Takes an array of strings or just one string.
* `.on(event, callback)`: Listen for an FS event.
  Available events: `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `ready`,
  `raw`, `error`.
  Additionally `all` is available which gets emitted with the underlying event
  name and path for every event other than `ready`, `raw`, and `error`. `raw` is internal, use it carefully.
* `.unwatch(path / paths)`: Stop watching files or directories.
  Takes an array of strings or just one string.
* `.close()`: **async** Removes all listeners from watched files. Asynchronous, returns Promise. Use with `await` to ensure bugs don't happen.
* `.getWatched()`: Returns an object representing all the paths on the file
  system being watched by this `FSWatcher` instance. The object's keys are all the
  directories (using absolute paths unless the `cwd` option was used), and the
  values are arrays of the names of the items contained in each directory.
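
A short sketch tying these methods together (the directory names and the sample output are made up for illustration):

```javascript
import chokidar from 'chokidar';

const watcher = chokidar.watch(['./docs', './src']);

watcher.on('ready', async () => {
  // Keys are watched directories; values list the entries seen inside each one,
  // e.g. { '/abs/path/docs': ['intro.md'], '/abs/path/src': ['index.js'] }
  console.log(watcher.getWatched());

  watcher.add('./scripts');         // start tracking another directory
  await watcher.unwatch('./docs');  // stop watching one of the originals
  await watcher.close();            // async: wait for all watchers to be released
  console.log('closed');
});
```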

### CLI

Check out third party [chokidar-cli](https://github.com/open-cli-tools/chokidar-cli),
which allows you to execute a command on each change, or get a stdio stream of change events.

## Troubleshooting

Sometimes, Chokidar runs out of file handles, causing `EMFILE` and `ENOSPC` errors:

* `bash: cannot set terminal process group (-1): Inappropriate ioctl for device bash: no job control in this shell`
* `Error: watch /home/ ENOSPC`

There are two things that can cause it.

1. Exhausted file handles for generic fs operations
   - Can be solved by using [graceful-fs](https://www.npmjs.com/package/graceful-fs),
     which can monkey-patch the native `fs` module used by chokidar: `let fs = require('fs'); let grfs = require('graceful-fs'); grfs.gracefulify(fs);`
   - Can also be solved by tuning the OS: `echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p`.
2. Exhausted file handles for `fs.watch`
   - Can't seem to be solved by graceful-fs or OS tuning
   - It's possible to start using `usePolling: true`, which will switch the backend to the resource-intensive `fs.watchFile`
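
For example, the polling fallback from item 2 can be enabled explicitly (a sketch; whether it actually helps depends on the environment):

```javascript
import chokidar from 'chokidar';

// Opt into the polling backend. Heavier on CPU, but it avoids exhausting
// fs.watch handles and also works on network file systems.
const watcher = chokidar.watch('.', { usePolling: true, interval: 300, binaryInterval: 600 });
watcher.on('error', (err) => console.error('watcher error:', err));
```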

All fsevents-related issues (`WARN optional dep failed`, `fsevents is not a constructor`) are solved by upgrading to v4+.

## Changelog

- **v4 (Sep 2024):** remove glob support and bundled fsevents. Decrease dependency count from 13 to 1. Rewrite in typescript. Bumps minimum node.js requirement to v14+
- **v3 (Apr 2019):** massive CPU & RAM consumption improvements; reduces deps / package size by a factor of 17x and bumps Node.js requirement to v8.16+.
- **v2 (Dec 2017):** globs are now posix-style-only. Tons of bugfixes.
- **v1 (Apr 2015):** glob support, symlink support, tons of bugfixes. Node 0.8+ is supported
- **v0.1 (Apr 2012):** Initial release, extracted from [Brunch](https://github.com/brunch/brunch/blob/9847a065aea300da99bd0753f90354cde9de1261/src/helpers.coffee#L66)

### Upgrading

If you've used globs before and want to replicate the functionality with v4:

```js
// v3
chok.watch('**/*.js');
chok.watch("./directory/**/*");

// v4
chok.watch('.', {
  ignored: (path, stats) => stats?.isFile() && !path.endsWith('.js'), // only watch js files
});
chok.watch('./directory');

// other way
import { glob } from 'node:fs/promises';
const watcher = watch(await Array.fromAsync(glob('**/*.js')));

// unwatching
// v3
chok.unwatch('**/*.js');
// v4
chok.unwatch(await glob('**/*.js'));
```

## Also

Why was chokidar named this way? What's the meaning behind it?

> Chowkidar is a transliteration of a Hindi word meaning 'watchman, gatekeeper', चौकीदार. This ultimately comes from Sanskrit _चतुष्क_ (crossway, quadrangle, consisting-of-four). This word is also used in other languages like Urdu as (چوکیدار) which is widely used in Pakistan and India.

## License

MIT (c) Paul Miller (<https://paulmillr.com>), see [LICENSE](LICENSE) file.

node_modules/chokidar/esm/handler.d.ts (generated, vendored, Normal file): 90 lines
@@ -0,0 +1,90 @@
import type { WatchEventType, Stats, FSWatcher as NativeFsWatcher } from 'fs';
|
||||
import type { FSWatcher, WatchHelper, Throttler } from './index.js';
|
||||
import type { EntryInfo } from 'readdirp';
|
||||
export type Path = string;
|
||||
export declare const STR_DATA = "data";
|
||||
export declare const STR_END = "end";
|
||||
export declare const STR_CLOSE = "close";
|
||||
export declare const EMPTY_FN: () => void;
|
||||
export declare const IDENTITY_FN: (val: unknown) => unknown;
|
||||
export declare const isWindows: boolean;
|
||||
export declare const isMacos: boolean;
|
||||
export declare const isLinux: boolean;
|
||||
export declare const isFreeBSD: boolean;
|
||||
export declare const isIBMi: boolean;
|
||||
export declare const EVENTS: {
|
||||
readonly ALL: "all";
|
||||
readonly READY: "ready";
|
||||
readonly ADD: "add";
|
||||
readonly CHANGE: "change";
|
||||
readonly ADD_DIR: "addDir";
|
||||
readonly UNLINK: "unlink";
|
||||
readonly UNLINK_DIR: "unlinkDir";
|
||||
readonly RAW: "raw";
|
||||
readonly ERROR: "error";
|
||||
};
|
||||
export type EventName = (typeof EVENTS)[keyof typeof EVENTS];
|
||||
export type FsWatchContainer = {
|
||||
listeners: (path: string) => void | Set<any>;
|
||||
errHandlers: (err: unknown) => void | Set<any>;
|
||||
rawEmitters: (ev: WatchEventType, path: string, opts: unknown) => void | Set<any>;
|
||||
watcher: NativeFsWatcher;
|
||||
watcherUnusable?: boolean;
|
||||
};
|
||||
export interface WatchHandlers {
|
||||
listener: (path: string) => void;
|
||||
errHandler: (err: unknown) => void;
|
||||
rawEmitter: (ev: WatchEventType, path: string, opts: unknown) => void;
|
||||
}
|
||||
/**
|
||||
* @mixin
|
||||
*/
|
||||
export declare class NodeFsHandler {
|
||||
fsw: FSWatcher;
|
||||
_boundHandleError: (error: unknown) => void;
|
||||
constructor(fsW: FSWatcher);
|
||||
/**
|
||||
* Watch file for changes with fs_watchFile or fs_watch.
|
||||
* @param path to file or dir
|
||||
* @param listener on fs change
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_watchWithNodeFs(path: string, listener: (path: string, newStats?: any) => void | Promise<void>): (() => void) | undefined;
|
||||
/**
|
||||
* Watch a file and emit add event if warranted.
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_handleFile(file: Path, stats: Stats, initialAdd: boolean): (() => void) | undefined;
|
||||
/**
|
||||
* Handle symlinks encountered while reading a dir.
|
||||
* @param entry returned by readdirp
|
||||
* @param directory path of dir being read
|
||||
* @param path of this item
|
||||
* @param item basename of this item
|
||||
* @returns true if no more processing is needed for this entry.
|
||||
*/
|
||||
_handleSymlink(entry: EntryInfo, directory: string, path: Path, item: string): Promise<boolean | undefined>;
|
||||
_handleRead(directory: string, initialAdd: boolean, wh: WatchHelper, target: Path, dir: Path, depth: number, throttler: Throttler): Promise<unknown> | undefined;
|
||||
/**
|
||||
* Read directory to add / remove files from `@watched` list and re-read it on change.
|
||||
* @param dir fs path
|
||||
* @param stats
|
||||
* @param initialAdd
|
||||
* @param depth relative to user-supplied path
|
||||
* @param target child path targeted for watch
|
||||
* @param wh Common watch helpers for this path
|
||||
* @param realpath
|
||||
* @returns closer for the watcher instance.
|
||||
*/
|
||||
_handleDir(dir: string, stats: Stats, initialAdd: boolean, depth: number, target: string, wh: WatchHelper, realpath: string): Promise<(() => void) | undefined>;
|
||||
/**
|
||||
* Handle added file, directory, or glob pattern.
|
||||
* Delegates call to _handleFile / _handleDir after checks.
|
||||
* @param path to file or ir
|
||||
* @param initialAdd was the file added at watch instantiation?
|
||||
* @param priorWh depth relative to user-supplied path
|
||||
* @param depth Child path actually targeted for watch
|
||||
* @param target Child path actually targeted for watch
|
||||
*/
|
||||
_addToNodeFs(path: string, initialAdd: boolean, priorWh: WatchHelper | undefined, depth: number, target?: string): Promise<string | false | undefined>;
|
||||
}
|
node_modules/chokidar/esm/handler.js (generated, vendored, Normal file): 629 lines
@@ -0,0 +1,629 @@
import { watchFile, unwatchFile, watch as fs_watch } from 'fs';
|
||||
import { open, stat, lstat, realpath as fsrealpath } from 'fs/promises';
|
||||
import * as sysPath from 'path';
|
||||
import { type as osType } from 'os';
|
||||
export const STR_DATA = 'data';
|
||||
export const STR_END = 'end';
|
||||
export const STR_CLOSE = 'close';
|
||||
export const EMPTY_FN = () => { };
|
||||
export const IDENTITY_FN = (val) => val;
|
||||
const pl = process.platform;
|
||||
export const isWindows = pl === 'win32';
|
||||
export const isMacos = pl === 'darwin';
|
||||
export const isLinux = pl === 'linux';
|
||||
export const isFreeBSD = pl === 'freebsd';
|
||||
export const isIBMi = osType() === 'OS400';
|
||||
export const EVENTS = {
|
||||
ALL: 'all',
|
||||
READY: 'ready',
|
||||
ADD: 'add',
|
||||
CHANGE: 'change',
|
||||
ADD_DIR: 'addDir',
|
||||
UNLINK: 'unlink',
|
||||
UNLINK_DIR: 'unlinkDir',
|
||||
RAW: 'raw',
|
||||
ERROR: 'error',
|
||||
};
|
||||
const EV = EVENTS;
|
||||
const THROTTLE_MODE_WATCH = 'watch';
|
||||
const statMethods = { lstat, stat };
|
||||
const KEY_LISTENERS = 'listeners';
|
||||
const KEY_ERR = 'errHandlers';
|
||||
const KEY_RAW = 'rawEmitters';
|
||||
const HANDLER_KEYS = [KEY_LISTENERS, KEY_ERR, KEY_RAW];
|
||||
// prettier-ignore
|
||||
const binaryExtensions = new Set([
|
||||
'3dm', '3ds', '3g2', '3gp', '7z', 'a', 'aac', 'adp', 'afdesign', 'afphoto', 'afpub', 'ai',
|
||||
'aif', 'aiff', 'alz', 'ape', 'apk', 'appimage', 'ar', 'arj', 'asf', 'au', 'avi',
|
||||
'bak', 'baml', 'bh', 'bin', 'bk', 'bmp', 'btif', 'bz2', 'bzip2',
|
||||
'cab', 'caf', 'cgm', 'class', 'cmx', 'cpio', 'cr2', 'cur', 'dat', 'dcm', 'deb', 'dex', 'djvu',
|
||||
'dll', 'dmg', 'dng', 'doc', 'docm', 'docx', 'dot', 'dotm', 'dra', 'DS_Store', 'dsk', 'dts',
|
||||
'dtshd', 'dvb', 'dwg', 'dxf',
|
||||
'ecelp4800', 'ecelp7470', 'ecelp9600', 'egg', 'eol', 'eot', 'epub', 'exe',
|
||||
'f4v', 'fbs', 'fh', 'fla', 'flac', 'flatpak', 'fli', 'flv', 'fpx', 'fst', 'fvt',
|
||||
'g3', 'gh', 'gif', 'graffle', 'gz', 'gzip',
|
||||
'h261', 'h263', 'h264', 'icns', 'ico', 'ief', 'img', 'ipa', 'iso',
|
||||
'jar', 'jpeg', 'jpg', 'jpgv', 'jpm', 'jxr', 'key', 'ktx',
|
||||
'lha', 'lib', 'lvp', 'lz', 'lzh', 'lzma', 'lzo',
|
||||
'm3u', 'm4a', 'm4v', 'mar', 'mdi', 'mht', 'mid', 'midi', 'mj2', 'mka', 'mkv', 'mmr', 'mng',
|
||||
'mobi', 'mov', 'movie', 'mp3',
|
||||
'mp4', 'mp4a', 'mpeg', 'mpg', 'mpga', 'mxu',
|
||||
'nef', 'npx', 'numbers', 'nupkg',
|
||||
'o', 'odp', 'ods', 'odt', 'oga', 'ogg', 'ogv', 'otf', 'ott',
|
||||
'pages', 'pbm', 'pcx', 'pdb', 'pdf', 'pea', 'pgm', 'pic', 'png', 'pnm', 'pot', 'potm',
|
||||
'potx', 'ppa', 'ppam',
|
||||
'ppm', 'pps', 'ppsm', 'ppsx', 'ppt', 'pptm', 'pptx', 'psd', 'pya', 'pyc', 'pyo', 'pyv',
|
||||
'qt',
|
||||
'rar', 'ras', 'raw', 'resources', 'rgb', 'rip', 'rlc', 'rmf', 'rmvb', 'rpm', 'rtf', 'rz',
|
||||
's3m', 's7z', 'scpt', 'sgi', 'shar', 'snap', 'sil', 'sketch', 'slk', 'smv', 'snk', 'so',
|
||||
'stl', 'suo', 'sub', 'swf',
|
||||
'tar', 'tbz', 'tbz2', 'tga', 'tgz', 'thmx', 'tif', 'tiff', 'tlz', 'ttc', 'ttf', 'txz',
|
||||
'udf', 'uvh', 'uvi', 'uvm', 'uvp', 'uvs', 'uvu',
|
||||
'viv', 'vob',
|
||||
'war', 'wav', 'wax', 'wbmp', 'wdp', 'weba', 'webm', 'webp', 'whl', 'wim', 'wm', 'wma',
|
||||
'wmv', 'wmx', 'woff', 'woff2', 'wrm', 'wvx',
|
||||
'xbm', 'xif', 'xla', 'xlam', 'xls', 'xlsb', 'xlsm', 'xlsx', 'xlt', 'xltm', 'xltx', 'xm',
|
||||
'xmind', 'xpi', 'xpm', 'xwd', 'xz',
|
||||
'z', 'zip', 'zipx',
|
||||
]);
|
||||
const isBinaryPath = (filePath) => binaryExtensions.has(sysPath.extname(filePath).slice(1).toLowerCase());
|
||||
// TODO: emit errors properly. Example: EMFILE on Macos.
|
||||
const foreach = (val, fn) => {
|
||||
if (val instanceof Set) {
|
||||
val.forEach(fn);
|
||||
}
|
||||
else {
|
||||
fn(val);
|
||||
}
|
||||
};
|
||||
const addAndConvert = (main, prop, item) => {
|
||||
let container = main[prop];
|
||||
if (!(container instanceof Set)) {
|
||||
main[prop] = container = new Set([container]);
|
||||
}
|
||||
container.add(item);
|
||||
};
|
||||
const clearItem = (cont) => (key) => {
|
||||
const set = cont[key];
|
||||
if (set instanceof Set) {
|
||||
set.clear();
|
||||
}
|
||||
else {
|
||||
delete cont[key];
|
||||
}
|
||||
};
|
||||
const delFromSet = (main, prop, item) => {
|
||||
const container = main[prop];
|
||||
if (container instanceof Set) {
|
||||
container.delete(item);
|
||||
}
|
||||
else if (container === item) {
|
||||
delete main[prop];
|
||||
}
|
||||
};
|
||||
const isEmptySet = (val) => (val instanceof Set ? val.size === 0 : !val);
|
||||
const FsWatchInstances = new Map();
|
||||
/**
|
||||
* Instantiates the fs_watch interface
|
||||
* @param path to be watched
|
||||
* @param options to be passed to fs_watch
|
||||
* @param listener main event handler
|
||||
* @param errHandler emits info about errors
|
||||
* @param emitRaw emits raw event data
|
||||
* @returns {NativeFsWatcher}
|
||||
*/
|
||||
function createFsWatchInstance(path, options, listener, errHandler, emitRaw) {
|
||||
const handleEvent = (rawEvent, evPath) => {
|
||||
listener(path);
|
||||
emitRaw(rawEvent, evPath, { watchedPath: path });
|
||||
// emit based on events occurring for files from a directory's watcher in
|
||||
// case the file's watcher misses it (and rely on throttling to de-dupe)
|
||||
if (evPath && path !== evPath) {
|
||||
fsWatchBroadcast(sysPath.resolve(path, evPath), KEY_LISTENERS, sysPath.join(path, evPath));
|
||||
}
|
||||
};
|
||||
try {
|
||||
return fs_watch(path, {
|
||||
persistent: options.persistent,
|
||||
}, handleEvent);
|
||||
}
|
||||
catch (error) {
|
||||
errHandler(error);
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Helper for passing fs_watch event data to a collection of listeners
|
||||
* @param fullPath absolute path bound to fs_watch instance
|
||||
*/
|
||||
const fsWatchBroadcast = (fullPath, listenerType, val1, val2, val3) => {
|
||||
const cont = FsWatchInstances.get(fullPath);
|
||||
if (!cont)
|
||||
return;
|
||||
foreach(cont[listenerType], (listener) => {
|
||||
listener(val1, val2, val3);
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Instantiates the fs_watch interface or binds listeners
|
||||
* to an existing one covering the same file system entry
|
||||
* @param path
|
||||
* @param fullPath absolute path
|
||||
* @param options to be passed to fs_watch
|
||||
* @param handlers container for event listener functions
|
||||
*/
|
||||
const setFsWatchListener = (path, fullPath, options, handlers) => {
|
||||
const { listener, errHandler, rawEmitter } = handlers;
|
||||
let cont = FsWatchInstances.get(fullPath);
|
||||
let watcher;
|
||||
if (!options.persistent) {
|
||||
watcher = createFsWatchInstance(path, options, listener, errHandler, rawEmitter);
|
||||
if (!watcher)
|
||||
return;
|
||||
return watcher.close.bind(watcher);
|
||||
}
|
||||
if (cont) {
|
||||
addAndConvert(cont, KEY_LISTENERS, listener);
|
||||
addAndConvert(cont, KEY_ERR, errHandler);
|
||||
addAndConvert(cont, KEY_RAW, rawEmitter);
|
||||
}
|
||||
else {
|
||||
watcher = createFsWatchInstance(path, options, fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS), errHandler, // no need to use broadcast here
|
||||
fsWatchBroadcast.bind(null, fullPath, KEY_RAW));
|
||||
if (!watcher)
|
||||
return;
|
||||
watcher.on(EV.ERROR, async (error) => {
|
||||
const broadcastErr = fsWatchBroadcast.bind(null, fullPath, KEY_ERR);
|
||||
if (cont)
|
||||
cont.watcherUnusable = true; // documented since Node 10.4.1
|
||||
// Workaround for https://github.com/joyent/node/issues/4337
|
||||
if (isWindows && error.code === 'EPERM') {
|
||||
try {
|
||||
const fd = await open(path, 'r');
|
||||
await fd.close();
|
||||
broadcastErr(error);
|
||||
}
|
||||
catch (err) {
|
||||
// do nothing
|
||||
}
|
||||
}
|
||||
else {
|
||||
broadcastErr(error);
|
||||
}
|
||||
});
|
||||
cont = {
|
||||
listeners: listener,
|
||||
errHandlers: errHandler,
|
||||
rawEmitters: rawEmitter,
|
||||
watcher,
|
||||
};
|
||||
FsWatchInstances.set(fullPath, cont);
|
||||
}
|
||||
// const index = cont.listeners.indexOf(listener);
|
||||
// removes this instance's listeners and closes the underlying fs_watch
|
||||
// instance if there are no more listeners left
|
||||
return () => {
|
||||
delFromSet(cont, KEY_LISTENERS, listener);
|
||||
delFromSet(cont, KEY_ERR, errHandler);
|
||||
delFromSet(cont, KEY_RAW, rawEmitter);
|
||||
if (isEmptySet(cont.listeners)) {
|
||||
// Check to protect against issue gh-730.
|
||||
// if (cont.watcherUnusable) {
|
||||
cont.watcher.close();
|
||||
// }
|
||||
FsWatchInstances.delete(fullPath);
|
||||
HANDLER_KEYS.forEach(clearItem(cont));
|
||||
// @ts-ignore
|
||||
cont.watcher = undefined;
|
||||
Object.freeze(cont);
|
||||
}
|
||||
};
|
||||
};
|
||||
// fs_watchFile helpers
|
||||
// object to hold per-process fs_watchFile instances
|
||||
// (may be shared across chokidar FSWatcher instances)
|
||||
const FsWatchFileInstances = new Map();
|
||||
/**
|
||||
* Instantiates the fs_watchFile interface or binds listeners
|
||||
* to an existing one covering the same file system entry
|
||||
* @param path to be watched
|
||||
* @param fullPath absolute path
|
||||
* @param options options to be passed to fs_watchFile
|
||||
* @param handlers container for event listener functions
|
||||
* @returns closer
|
||||
*/
|
||||
const setFsWatchFileListener = (path, fullPath, options, handlers) => {
|
||||
const { listener, rawEmitter } = handlers;
|
||||
let cont = FsWatchFileInstances.get(fullPath);
|
||||
// let listeners = new Set();
|
||||
// let rawEmitters = new Set();
|
||||
const copts = cont && cont.options;
|
||||
if (copts && (copts.persistent < options.persistent || copts.interval > options.interval)) {
|
||||
// "Upgrade" the watcher to persistence or a quicker interval.
|
||||
// This creates some unlikely edge case issues if the user mixes
|
||||
// settings in a very weird way, but solving for those cases
|
||||
// doesn't seem worthwhile for the added complexity.
|
||||
// listeners = cont.listeners;
|
||||
// rawEmitters = cont.rawEmitters;
|
||||
unwatchFile(fullPath);
|
||||
cont = undefined;
|
||||
}
|
||||
if (cont) {
|
||||
addAndConvert(cont, KEY_LISTENERS, listener);
|
||||
addAndConvert(cont, KEY_RAW, rawEmitter);
|
||||
}
|
||||
else {
|
||||
// TODO
|
||||
// listeners.add(listener);
|
||||
// rawEmitters.add(rawEmitter);
|
||||
cont = {
|
||||
listeners: listener,
|
||||
rawEmitters: rawEmitter,
|
||||
options,
|
||||
watcher: watchFile(fullPath, options, (curr, prev) => {
|
||||
foreach(cont.rawEmitters, (rawEmitter) => {
|
||||
rawEmitter(EV.CHANGE, fullPath, { curr, prev });
|
||||
});
|
||||
const currmtime = curr.mtimeMs;
|
||||
if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) {
|
||||
foreach(cont.listeners, (listener) => listener(path, curr));
|
||||
}
|
||||
}),
|
||||
};
|
||||
FsWatchFileInstances.set(fullPath, cont);
|
||||
}
|
||||
// const index = cont.listeners.indexOf(listener);
|
||||
// Removes this instance's listeners and closes the underlying fs_watchFile
|
||||
// instance if there are no more listeners left.
|
||||
return () => {
|
||||
delFromSet(cont, KEY_LISTENERS, listener);
|
||||
delFromSet(cont, KEY_RAW, rawEmitter);
|
||||
if (isEmptySet(cont.listeners)) {
|
||||
FsWatchFileInstances.delete(fullPath);
|
||||
unwatchFile(fullPath);
|
||||
cont.options = cont.watcher = undefined;
|
||||
Object.freeze(cont);
|
||||
}
|
||||
};
|
||||
};
|
||||
/**
|
||||
* @mixin
|
||||
*/
|
||||
export class NodeFsHandler {
|
||||
constructor(fsW) {
|
||||
this.fsw = fsW;
|
||||
this._boundHandleError = (error) => fsW._handleError(error);
|
||||
}
|
||||
/**
|
||||
* Watch file for changes with fs_watchFile or fs_watch.
|
||||
* @param path to file or dir
|
||||
* @param listener on fs change
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_watchWithNodeFs(path, listener) {
|
||||
const opts = this.fsw.options;
|
||||
const directory = sysPath.dirname(path);
|
||||
const basename = sysPath.basename(path);
|
||||
const parent = this.fsw._getWatchedDir(directory);
|
||||
parent.add(basename);
|
||||
const absolutePath = sysPath.resolve(path);
|
||||
const options = {
|
||||
persistent: opts.persistent,
|
||||
};
|
||||
if (!listener)
|
||||
listener = EMPTY_FN;
|
||||
let closer;
|
||||
if (opts.usePolling) {
|
||||
const enableBin = opts.interval !== opts.binaryInterval;
|
||||
options.interval = enableBin && isBinaryPath(basename) ? opts.binaryInterval : opts.interval;
|
||||
closer = setFsWatchFileListener(path, absolutePath, options, {
|
||||
listener,
|
||||
rawEmitter: this.fsw._emitRaw,
|
||||
});
|
||||
}
|
||||
else {
|
||||
closer = setFsWatchListener(path, absolutePath, options, {
|
||||
listener,
|
||||
errHandler: this._boundHandleError,
|
||||
rawEmitter: this.fsw._emitRaw,
|
||||
});
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
/**
|
||||
* Watch a file and emit add event if warranted.
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_handleFile(file, stats, initialAdd) {
|
||||
if (this.fsw.closed) {
|
||||
return;
|
||||
}
|
||||
const dirname = sysPath.dirname(file);
|
||||
const basename = sysPath.basename(file);
|
||||
const parent = this.fsw._getWatchedDir(dirname);
|
||||
// stats is always present
|
||||
let prevStats = stats;
|
||||
// if the file is already being watched, do nothing
|
||||
if (parent.has(basename))
|
||||
return;
|
||||
const listener = async (path, newStats) => {
|
||||
if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5))
|
||||
return;
|
||||
if (!newStats || newStats.mtimeMs === 0) {
|
||||
try {
|
||||
const newStats = await stat(file);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
// Check that change event was not fired because of changed only accessTime.
|
||||
const at = newStats.atimeMs;
|
||||
const mt = newStats.mtimeMs;
|
||||
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
|
||||
this.fsw._emit(EV.CHANGE, file, newStats);
|
||||
}
|
||||
if ((isMacos || isLinux || isFreeBSD) && prevStats.ino !== newStats.ino) {
|
||||
this.fsw._closeFile(path);
|
||||
prevStats = newStats;
|
||||
const closer = this._watchWithNodeFs(file, listener);
|
||||
if (closer)
|
||||
this.fsw._addPathCloser(path, closer);
|
||||
}
|
||||
else {
|
||||
prevStats = newStats;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
// Fix issues where mtime is null but file is still present
|
||||
this.fsw._remove(dirname, basename);
|
||||
}
|
||||
// add is about to be emitted if file not already tracked in parent
|
||||
}
|
||||
else if (parent.has(basename)) {
|
||||
// Check that change event was not fired because of changed only accessTime.
|
||||
const at = newStats.atimeMs;
|
||||
const mt = newStats.mtimeMs;
|
||||
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
|
||||
this.fsw._emit(EV.CHANGE, file, newStats);
|
||||
}
|
||||
prevStats = newStats;
|
||||
}
|
||||
};
|
||||
// kick off the watcher
|
||||
const closer = this._watchWithNodeFs(file, listener);
|
||||
// emit an add event if we're supposed to
|
||||
if (!(initialAdd && this.fsw.options.ignoreInitial) && this.fsw._isntIgnored(file)) {
|
||||
if (!this.fsw._throttle(EV.ADD, file, 0))
|
||||
return;
|
||||
this.fsw._emit(EV.ADD, file, stats);
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
/**
|
||||
* Handle symlinks encountered while reading a dir.
|
||||
* @param entry returned by readdirp
|
||||
* @param directory path of dir being read
|
||||
* @param path of this item
|
||||
* @param item basename of this item
|
||||
* @returns true if no more processing is needed for this entry.
|
||||
*/
|
||||
async _handleSymlink(entry, directory, path, item) {
|
||||
if (this.fsw.closed) {
|
||||
return;
|
||||
}
|
||||
const full = entry.fullPath;
|
||||
const dir = this.fsw._getWatchedDir(directory);
|
||||
if (!this.fsw.options.followSymlinks) {
|
||||
// watch symlink directly (don't follow) and detect changes
|
||||
this.fsw._incrReadyCount();
|
||||
let linkPath;
|
||||
try {
|
||||
linkPath = await fsrealpath(path);
|
||||
}
|
||||
catch (e) {
|
||||
this.fsw._emitReady();
|
||||
return true;
|
||||
}
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
if (dir.has(item)) {
|
||||
if (this.fsw._symlinkPaths.get(full) !== linkPath) {
|
||||
this.fsw._symlinkPaths.set(full, linkPath);
|
||||
this.fsw._emit(EV.CHANGE, path, entry.stats);
|
||||
}
|
||||
}
|
||||
else {
|
||||
dir.add(item);
|
||||
this.fsw._symlinkPaths.set(full, linkPath);
|
||||
this.fsw._emit(EV.ADD, path, entry.stats);
|
||||
}
|
||||
this.fsw._emitReady();
|
||||
return true;
|
||||
}
|
||||
// don't follow the same symlink more than once
|
||||
if (this.fsw._symlinkPaths.has(full)) {
|
||||
return true;
|
||||
}
|
||||
this.fsw._symlinkPaths.set(full, true);
|
||||
}
|
||||
_handleRead(directory, initialAdd, wh, target, dir, depth, throttler) {
|
||||
// Normalize the directory name on Windows
|
||||
directory = sysPath.join(directory, '');
|
||||
throttler = this.fsw._throttle('readdir', directory, 1000);
|
||||
if (!throttler)
|
||||
return;
|
||||
const previous = this.fsw._getWatchedDir(wh.path);
|
||||
const current = new Set();
|
||||
let stream = this.fsw._readdirp(directory, {
|
||||
fileFilter: (entry) => wh.filterPath(entry),
|
||||
directoryFilter: (entry) => wh.filterDir(entry),
|
||||
});
|
||||
if (!stream)
|
||||
return;
|
||||
stream
|
||||
.on(STR_DATA, async (entry) => {
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
const item = entry.path;
|
||||
let path = sysPath.join(directory, item);
|
||||
current.add(item);
|
||||
if (entry.stats.isSymbolicLink() &&
|
||||
(await this._handleSymlink(entry, directory, path, item))) {
|
||||
return;
|
||||
}
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
// Files that present in current directory snapshot
|
||||
// but absent in previous are added to watch list and
|
||||
// emit `add` event.
|
||||
if (item === target || (!target && !previous.has(item))) {
|
||||
this.fsw._incrReadyCount();
|
||||
// ensure relativeness of path is preserved in case of watcher reuse
|
||||
path = sysPath.join(dir, sysPath.relative(dir, path));
|
||||
this._addToNodeFs(path, initialAdd, wh, depth + 1);
|
||||
}
|
||||
})
|
||||
.on(EV.ERROR, this._boundHandleError);
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!stream)
|
||||
return reject();
|
||||
stream.once(STR_END, () => {
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
const wasThrottled = throttler ? throttler.clear() : false;
|
||||
resolve(undefined);
|
||||
// Files that absent in current directory snapshot
|
||||
// but present in previous emit `remove` event
|
||||
// and are removed from @watched[directory].
|
||||
previous
|
||||
.getChildren()
|
||||
.filter((item) => {
|
||||
return item !== directory && !current.has(item);
|
||||
})
|
||||
.forEach((item) => {
|
||||
this.fsw._remove(directory, item);
|
||||
});
|
||||
stream = undefined;
|
||||
// one more time for any missed in case changes came in extremely quickly
|
||||
if (wasThrottled)
|
||||
this._handleRead(directory, false, wh, target, dir, depth, throttler);
|
||||
});
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Read directory to add / remove files from `@watched` list and re-read it on change.
|
||||
* @param dir fs path
|
||||
* @param stats
|
||||
* @param initialAdd
|
||||
* @param depth relative to user-supplied path
|
||||
* @param target child path targeted for watch
|
||||
* @param wh Common watch helpers for this path
|
||||
* @param realpath
|
||||
* @returns closer for the watcher instance.
|
||||
*/
|
||||
async _handleDir(dir, stats, initialAdd, depth, target, wh, realpath) {
|
||||
const parentDir = this.fsw._getWatchedDir(sysPath.dirname(dir));
|
||||
const tracked = parentDir.has(sysPath.basename(dir));
|
||||
if (!(initialAdd && this.fsw.options.ignoreInitial) && !target && !tracked) {
|
||||
this.fsw._emit(EV.ADD_DIR, dir, stats);
|
||||
}
|
||||
// ensure dir is tracked (harmless if redundant)
|
||||
parentDir.add(sysPath.basename(dir));
|
||||
this.fsw._getWatchedDir(dir);
|
||||
let throttler;
|
||||
let closer;
|
||||
const oDepth = this.fsw.options.depth;
|
||||
if ((oDepth == null || depth <= oDepth) && !this.fsw._symlinkPaths.has(realpath)) {
|
||||
if (!target) {
|
||||
await this._handleRead(dir, initialAdd, wh, target, dir, depth, throttler);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
}
|
||||
closer = this._watchWithNodeFs(dir, (dirPath, stats) => {
|
||||
// if current directory is removed, do nothing
|
||||
if (stats && stats.mtimeMs === 0)
|
||||
return;
|
||||
this._handleRead(dirPath, false, wh, target, dir, depth, throttler);
|
||||
});
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
/**
|
||||
* Handle added file, directory, or glob pattern.
|
||||
* Delegates call to _handleFile / _handleDir after checks.
|
||||
* @param path to file or ir
|
||||
* @param initialAdd was the file added at watch instantiation?
|
||||
* @param priorWh depth relative to user-supplied path
|
||||
* @param depth Child path actually targeted for watch
|
||||
* @param target Child path actually targeted for watch
|
||||
*/
|
||||
async _addToNodeFs(path, initialAdd, priorWh, depth, target) {
|
||||
const ready = this.fsw._emitReady;
|
||||
if (this.fsw._isIgnored(path) || this.fsw.closed) {
|
||||
ready();
|
||||
return false;
|
||||
}
|
||||
const wh = this.fsw._getWatchHelpers(path);
|
||||
if (priorWh) {
|
||||
wh.filterPath = (entry) => priorWh.filterPath(entry);
|
||||
wh.filterDir = (entry) => priorWh.filterDir(entry);
|
||||
}
|
||||
// evaluate what is at the path we're being asked to watch
|
||||
try {
|
||||
const stats = await statMethods[wh.statMethod](wh.watchPath);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
if (this.fsw._isIgnored(wh.watchPath, stats)) {
|
||||
ready();
|
||||
return false;
|
||||
}
|
||||
const follow = this.fsw.options.followSymlinks;
|
||||
let closer;
|
||||
if (stats.isDirectory()) {
|
||||
const absPath = sysPath.resolve(path);
|
||||
const targetPath = follow ? await fsrealpath(path) : path;
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
// preserve this symlink's target path
|
||||
if (absPath !== targetPath && targetPath !== undefined) {
|
||||
this.fsw._symlinkPaths.set(absPath, targetPath);
|
||||
}
|
||||
}
|
||||
else if (stats.isSymbolicLink()) {
|
||||
const targetPath = follow ? await fsrealpath(path) : path;
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
const parent = sysPath.dirname(wh.watchPath);
|
||||
this.fsw._getWatchedDir(parent).add(wh.watchPath);
|
||||
this.fsw._emit(EV.ADD, wh.watchPath, stats);
|
||||
closer = await this._handleDir(parent, stats, initialAdd, depth, path, wh, targetPath);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
// preserve this symlink's target path
|
||||
if (targetPath !== undefined) {
|
||||
this.fsw._symlinkPaths.set(sysPath.resolve(path), targetPath);
|
||||
}
|
||||
}
|
||||
else {
|
||||
closer = this._handleFile(wh.watchPath, stats, initialAdd);
|
||||
}
|
||||
ready();
|
||||
if (closer)
|
||||
this.fsw._addPathCloser(path, closer);
|
||||
return false;
|
||||
}
|
||||
catch (error) {
|
||||
if (this.fsw._handleError(error)) {
|
||||
ready();
|
||||
return path;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
node_modules/chokidar/esm/index.d.ts (generated, vendored, Normal file): 215 lines
@@ -0,0 +1,215 @@
/*! chokidar - MIT License (c) 2012 Paul Miller (paulmillr.com) */
|
||||
import { Stats } from 'fs';
|
||||
import { EventEmitter } from 'events';
|
||||
import { ReaddirpStream, ReaddirpOptions, EntryInfo } from 'readdirp';
|
||||
import { NodeFsHandler, EventName, Path, EVENTS as EV, WatchHandlers } from './handler.js';
|
||||
type AWF = {
|
||||
stabilityThreshold: number;
|
||||
pollInterval: number;
|
||||
};
|
||||
type BasicOpts = {
|
||||
persistent: boolean;
|
||||
ignoreInitial: boolean;
|
||||
followSymlinks: boolean;
|
||||
cwd?: string;
|
||||
usePolling: boolean;
|
||||
interval: number;
|
||||
binaryInterval: number;
|
||||
alwaysStat?: boolean;
|
||||
depth?: number;
|
||||
ignorePermissionErrors: boolean;
|
||||
atomic: boolean | number;
|
||||
};
|
||||
export type Throttler = {
|
||||
timeoutObject: NodeJS.Timeout;
|
||||
clear: () => void;
|
||||
count: number;
|
||||
};
|
||||
export type ChokidarOptions = Partial<BasicOpts & {
|
||||
ignored: Matcher | Matcher[];
|
||||
awaitWriteFinish: boolean | Partial<AWF>;
|
||||
}>;
|
||||
export type FSWInstanceOptions = BasicOpts & {
|
||||
ignored: Matcher[];
|
||||
awaitWriteFinish: false | AWF;
|
||||
};
|
||||
export type ThrottleType = 'readdir' | 'watch' | 'add' | 'remove' | 'change';
|
||||
export type EmitArgs = [path: Path, stats?: Stats];
|
||||
export type EmitErrorArgs = [error: Error, stats?: Stats];
|
||||
export type EmitArgsWithName = [event: EventName, ...EmitArgs];
|
||||
export type MatchFunction = (val: string, stats?: Stats) => boolean;
|
||||
export interface MatcherObject {
|
||||
path: string;
|
||||
recursive?: boolean;
|
||||
}
|
||||
export type Matcher = string | RegExp | MatchFunction | MatcherObject;
|
||||
/**
|
||||
* Directory entry.
|
||||
*/
|
||||
declare class DirEntry {
|
||||
path: Path;
|
||||
_removeWatcher: (dir: string, base: string) => void;
|
||||
items: Set<Path>;
|
||||
constructor(dir: Path, removeWatcher: (dir: string, base: string) => void);
|
||||
add(item: string): void;
|
||||
remove(item: string): Promise<void>;
|
||||
has(item: string): boolean | undefined;
getChildren(): string[];
dispose(): void;
}
export declare class WatchHelper {
fsw: FSWatcher;
path: string;
watchPath: string;
fullWatchPath: string;
dirParts: string[][];
followSymlinks: boolean;
statMethod: 'stat' | 'lstat';
constructor(path: string, follow: boolean, fsw: FSWatcher);
entryPath(entry: EntryInfo): Path;
filterPath(entry: EntryInfo): boolean;
filterDir(entry: EntryInfo): boolean;
}
export interface FSWatcherKnownEventMap {
[EV.READY]: [];
[EV.RAW]: Parameters<WatchHandlers['rawEmitter']>;
[EV.ERROR]: Parameters<WatchHandlers['errHandler']>;
[EV.ALL]: [event: EventName, ...EmitArgs];
}
export type FSWatcherEventMap = FSWatcherKnownEventMap & {
[k in Exclude<EventName, keyof FSWatcherKnownEventMap>]: EmitArgs;
};
/**
* Watches files & directories for changes. Emitted events:
* `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error`
*
* new FSWatcher()
* .add(directories)
* .on('add', path => log('File', path, 'was added'))
*/
export declare class FSWatcher extends EventEmitter<FSWatcherEventMap> {
closed: boolean;
options: FSWInstanceOptions;
_closers: Map<string, Array<any>>;
_ignoredPaths: Set<Matcher>;
_throttled: Map<ThrottleType, Map<any, any>>;
_streams: Set<ReaddirpStream>;
_symlinkPaths: Map<Path, string | boolean>;
_watched: Map<string, DirEntry>;
_pendingWrites: Map<string, any>;
_pendingUnlinks: Map<string, EmitArgsWithName>;
_readyCount: number;
_emitReady: () => void;
_closePromise?: Promise<void>;
_userIgnored?: MatchFunction;
_readyEmitted: boolean;
_emitRaw: WatchHandlers['rawEmitter'];
_boundRemove: (dir: string, item: string) => void;
_nodeFsHandler: NodeFsHandler;
constructor(_opts?: ChokidarOptions);
_addIgnoredPath(matcher: Matcher): void;
_removeIgnoredPath(matcher: Matcher): void;
/**
* Adds paths to be watched on an existing FSWatcher instance.
* @param paths_ file or file list. Other arguments are unused
*/
add(paths_: Path | Path[], _origAdd?: string, _internal?: boolean): FSWatcher;
/**
* Close watchers or start ignoring events from specified paths.
*/
unwatch(paths_: Path | Path[]): FSWatcher;
/**
* Close watchers and remove all listeners from watched paths.
*/
close(): Promise<void>;
/**
* Expose list of watched paths
* @returns for chaining
*/
getWatched(): Record<string, string[]>;
emitWithAll(event: EventName, args: EmitArgs): void;
/**
* Normalize and emit events.
* Calling _emit DOES NOT MEAN emit() would be called!
* @param event Type of event
* @param path File or directory path
* @param stats arguments to be passed with event
* @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag
*/
_emit(event: EventName, path: Path, stats?: Stats): Promise<this | undefined>;
/**
* Common handler for errors
* @returns The error if defined, otherwise the value of the FSWatcher instance's `closed` flag
*/
_handleError(error: Error): Error | boolean;
/**
* Helper utility for throttling
* @param actionType type being throttled
* @param path being acted upon
* @param timeout duration of time to suppress duplicate actions
* @returns tracking object or false if action should be suppressed
*/
_throttle(actionType: ThrottleType, path: Path, timeout: number): Throttler | false;
_incrReadyCount(): number;
/**
* Awaits write operation to finish.
* Polls a newly created file for size variations. When file size does not change for 'threshold' milliseconds, calls the callback.
* @param path being acted upon
* @param threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished
* @param event
* @param awfEmit Callback to be called when ready for event to be emitted.
*/
_awaitWriteFinish(path: Path, threshold: number, event: EventName, awfEmit: (err?: Error, stat?: Stats) => void): void;
/**
* Determines whether user has asked to ignore this path.
*/
_isIgnored(path: Path, stats?: Stats): boolean;
_isntIgnored(path: Path, stat?: Stats): boolean;
/**
* Provides a set of common helpers and properties relating to symlink handling.
* @param path file or directory pattern being watched
*/
_getWatchHelpers(path: Path): WatchHelper;
/**
* Provides directory tracking objects
* @param directory path of the directory
*/
_getWatchedDir(directory: string): DirEntry;
/**
* Check for read permissions: https://stackoverflow.com/a/11781404/1358405
*/
_hasReadPermissions(stats: Stats): boolean;
/**
* Handles emitting unlink events for
* files and directories, and via recursion, for
* files and directories within directories that are unlinked
* @param directory within which the following item is located
* @param item base path of item/directory
*/
_remove(directory: string, item: string, isDirectory?: boolean): void;
/**
* Closes all watchers for a path
*/
_closePath(path: Path): void;
/**
* Closes only file-specific watchers
*/
_closeFile(path: Path): void;
_addPathCloser(path: Path, closer: () => void): void;
_readdirp(root: Path, opts?: Partial<ReaddirpOptions>): ReaddirpStream | undefined;
}
/**
* Instantiates watcher with paths to be tracked.
* @param paths file / directory paths
* @param options opts, such as `atomic`, `awaitWriteFinish`, `ignored`, and others
* @returns an instance of FSWatcher for chaining.
* @example
* const watcher = watch('.').on('all', (event, path) => { console.log(event, path); });
* watch('.', { atomic: true, awaitWriteFinish: true, ignored: (f, stats) => stats?.isFile() && !f.endsWith('.js') })
*/
export declare function watch(paths: string | string[], options?: ChokidarOptions): FSWatcher;
declare const _default: {
watch: typeof watch;
FSWatcher: typeof FSWatcher;
};
export default _default;
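The declarations above make up chokidar's public surface: the watch() factory plus FSWatcher's add, unwatch, close and getWatched methods. Below is a minimal usage sketch, assuming the package is imported by name from an ESM module; the watched directories and log messages are illustrative only and not part of this repository.

// sketch: exercising the public FSWatcher API declared above (illustrative paths)
import { watch } from 'chokidar';

const watcher = watch(['src', 'docs'], { ignoreInitial: true });

watcher
  .on('ready', () => console.log('initial scan done:', watcher.getWatched()))
  .on('add', (path) => console.log('added', path))
  .on('unlink', (path) => console.log('removed', path));

// stop caring about one subtree, then shut everything down
watcher.unwatch('docs');
await watcher.close();

Since close() returns a Promise that resolves once every underlying watcher is shut down, awaiting it before process exit is the cleanest teardown.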
798 node_modules/chokidar/esm/index.js generated vendored Normal file
@@ -0,0 +1,798 @@
/*! chokidar - MIT License (c) 2012 Paul Miller (paulmillr.com) */
|
||||
import { stat as statcb } from 'fs';
|
||||
import { stat, readdir } from 'fs/promises';
|
||||
import { EventEmitter } from 'events';
|
||||
import * as sysPath from 'path';
|
||||
import { readdirp } from 'readdirp';
|
||||
import { NodeFsHandler, EVENTS as EV, isWindows, isIBMi, EMPTY_FN, STR_CLOSE, STR_END, } from './handler.js';
|
||||
const SLASH = '/';
|
||||
const SLASH_SLASH = '//';
|
||||
const ONE_DOT = '.';
|
||||
const TWO_DOTS = '..';
|
||||
const STRING_TYPE = 'string';
|
||||
const BACK_SLASH_RE = /\\/g;
|
||||
const DOUBLE_SLASH_RE = /\/\//;
|
||||
const DOT_RE = /\..*\.(sw[px])$|~$|\.subl.*\.tmp/;
|
||||
const REPLACER_RE = /^\.[/\\]/;
|
||||
function arrify(item) {
|
||||
return Array.isArray(item) ? item : [item];
|
||||
}
|
||||
const isMatcherObject = (matcher) => typeof matcher === 'object' && matcher !== null && !(matcher instanceof RegExp);
|
||||
function createPattern(matcher) {
|
||||
if (typeof matcher === 'function')
|
||||
return matcher;
|
||||
if (typeof matcher === 'string')
|
||||
return (string) => matcher === string;
|
||||
if (matcher instanceof RegExp)
|
||||
return (string) => matcher.test(string);
|
||||
if (typeof matcher === 'object' && matcher !== null) {
|
||||
return (string) => {
|
||||
if (matcher.path === string)
|
||||
return true;
|
||||
if (matcher.recursive) {
|
||||
const relative = sysPath.relative(matcher.path, string);
|
||||
if (!relative) {
|
||||
return false;
|
||||
}
|
||||
return !relative.startsWith('..') && !sysPath.isAbsolute(relative);
|
||||
}
|
||||
return false;
|
||||
};
|
||||
}
|
||||
return () => false;
|
||||
}
|
||||
function normalizePath(path) {
|
||||
if (typeof path !== 'string')
|
||||
throw new Error('string expected');
|
||||
path = sysPath.normalize(path);
|
||||
path = path.replace(/\\/g, '/');
|
||||
let prepend = false;
|
||||
if (path.startsWith('//'))
|
||||
prepend = true;
|
||||
const DOUBLE_SLASH_RE = /\/\//;
|
||||
while (path.match(DOUBLE_SLASH_RE))
|
||||
path = path.replace(DOUBLE_SLASH_RE, '/');
|
||||
if (prepend)
|
||||
path = '/' + path;
|
||||
return path;
|
||||
}
|
||||
function matchPatterns(patterns, testString, stats) {
|
||||
const path = normalizePath(testString);
|
||||
for (let index = 0; index < patterns.length; index++) {
|
||||
const pattern = patterns[index];
|
||||
if (pattern(path, stats)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function anymatch(matchers, testString) {
|
||||
if (matchers == null) {
|
||||
throw new TypeError('anymatch: specify first argument');
|
||||
}
|
||||
// Early cache for matchers.
|
||||
const matchersArray = arrify(matchers);
|
||||
const patterns = matchersArray.map((matcher) => createPattern(matcher));
|
||||
if (testString == null) {
|
||||
return (testString, stats) => {
|
||||
return matchPatterns(patterns, testString, stats);
|
||||
};
|
||||
}
|
||||
return matchPatterns(patterns, testString);
|
||||
}
|
||||
const unifyPaths = (paths_) => {
|
||||
const paths = arrify(paths_).flat();
|
||||
if (!paths.every((p) => typeof p === STRING_TYPE)) {
|
||||
throw new TypeError(`Non-string provided as watch path: ${paths}`);
|
||||
}
|
||||
return paths.map(normalizePathToUnix);
|
||||
};
|
||||
// If SLASH_SLASH occurs at the beginning of path, it is not replaced
|
||||
// because "//StoragePC/DrivePool/Movies" is a valid network path
|
||||
const toUnix = (string) => {
|
||||
let str = string.replace(BACK_SLASH_RE, SLASH);
|
||||
let prepend = false;
|
||||
if (str.startsWith(SLASH_SLASH)) {
|
||||
prepend = true;
|
||||
}
|
||||
while (str.match(DOUBLE_SLASH_RE)) {
|
||||
str = str.replace(DOUBLE_SLASH_RE, SLASH);
|
||||
}
|
||||
if (prepend) {
|
||||
str = SLASH + str;
|
||||
}
|
||||
return str;
|
||||
};
|
||||
// Our version of upath.normalize
|
||||
// TODO: this is not equal to path-normalize module - investigate why
|
||||
const normalizePathToUnix = (path) => toUnix(sysPath.normalize(toUnix(path)));
|
||||
// TODO: refactor
|
||||
const normalizeIgnored = (cwd = '') => (path) => {
|
||||
if (typeof path === 'string') {
|
||||
return normalizePathToUnix(sysPath.isAbsolute(path) ? path : sysPath.join(cwd, path));
|
||||
}
|
||||
else {
|
||||
return path;
|
||||
}
|
||||
};
|
||||
const getAbsolutePath = (path, cwd) => {
|
||||
if (sysPath.isAbsolute(path)) {
|
||||
return path;
|
||||
}
|
||||
return sysPath.join(cwd, path);
|
||||
};
|
||||
const EMPTY_SET = Object.freeze(new Set());
|
||||
/**
|
||||
* Directory entry.
|
||||
*/
|
||||
class DirEntry {
|
||||
constructor(dir, removeWatcher) {
|
||||
this.path = dir;
|
||||
this._removeWatcher = removeWatcher;
|
||||
this.items = new Set();
|
||||
}
|
||||
add(item) {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return;
|
||||
if (item !== ONE_DOT && item !== TWO_DOTS)
|
||||
items.add(item);
|
||||
}
|
||||
async remove(item) {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return;
|
||||
items.delete(item);
|
||||
if (items.size > 0)
|
||||
return;
|
||||
const dir = this.path;
|
||||
try {
|
||||
await readdir(dir);
|
||||
}
|
||||
catch (err) {
|
||||
if (this._removeWatcher) {
|
||||
this._removeWatcher(sysPath.dirname(dir), sysPath.basename(dir));
|
||||
}
|
||||
}
|
||||
}
|
||||
has(item) {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return;
|
||||
return items.has(item);
|
||||
}
|
||||
getChildren() {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return [];
|
||||
return [...items.values()];
|
||||
}
|
||||
dispose() {
|
||||
this.items.clear();
|
||||
this.path = '';
|
||||
this._removeWatcher = EMPTY_FN;
|
||||
this.items = EMPTY_SET;
|
||||
Object.freeze(this);
|
||||
}
|
||||
}
|
||||
const STAT_METHOD_F = 'stat';
|
||||
const STAT_METHOD_L = 'lstat';
|
||||
export class WatchHelper {
|
||||
constructor(path, follow, fsw) {
|
||||
this.fsw = fsw;
|
||||
const watchPath = path;
|
||||
this.path = path = path.replace(REPLACER_RE, '');
|
||||
this.watchPath = watchPath;
|
||||
this.fullWatchPath = sysPath.resolve(watchPath);
|
||||
this.dirParts = [];
|
||||
this.dirParts.forEach((parts) => {
|
||||
if (parts.length > 1)
|
||||
parts.pop();
|
||||
});
|
||||
this.followSymlinks = follow;
|
||||
this.statMethod = follow ? STAT_METHOD_F : STAT_METHOD_L;
|
||||
}
|
||||
entryPath(entry) {
|
||||
return sysPath.join(this.watchPath, sysPath.relative(this.watchPath, entry.fullPath));
|
||||
}
|
||||
filterPath(entry) {
|
||||
const { stats } = entry;
|
||||
if (stats && stats.isSymbolicLink())
|
||||
return this.filterDir(entry);
|
||||
const resolvedPath = this.entryPath(entry);
|
||||
// TODO: what if stats is undefined? remove !
|
||||
return this.fsw._isntIgnored(resolvedPath, stats) && this.fsw._hasReadPermissions(stats);
|
||||
}
|
||||
filterDir(entry) {
|
||||
return this.fsw._isntIgnored(this.entryPath(entry), entry.stats);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Watches files & directories for changes. Emitted events:
|
||||
* `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error`
|
||||
*
|
||||
* new FSWatcher()
|
||||
* .add(directories)
|
||||
* .on('add', path => log('File', path, 'was added'))
|
||||
*/
|
||||
export class FSWatcher extends EventEmitter {
|
||||
// Not indenting methods for history sake; for now.
|
||||
constructor(_opts = {}) {
|
||||
super();
|
||||
this.closed = false;
|
||||
this._closers = new Map();
|
||||
this._ignoredPaths = new Set();
|
||||
this._throttled = new Map();
|
||||
this._streams = new Set();
|
||||
this._symlinkPaths = new Map();
|
||||
this._watched = new Map();
|
||||
this._pendingWrites = new Map();
|
||||
this._pendingUnlinks = new Map();
|
||||
this._readyCount = 0;
|
||||
this._readyEmitted = false;
|
||||
const awf = _opts.awaitWriteFinish;
|
||||
const DEF_AWF = { stabilityThreshold: 2000, pollInterval: 100 };
|
||||
const opts = {
|
||||
// Defaults
|
||||
persistent: true,
|
||||
ignoreInitial: false,
|
||||
ignorePermissionErrors: false,
|
||||
interval: 100,
|
||||
binaryInterval: 300,
|
||||
followSymlinks: true,
|
||||
usePolling: false,
|
||||
// useAsync: false,
|
||||
atomic: true, // NOTE: overwritten later (depends on usePolling)
|
||||
..._opts,
|
||||
// Change format
|
||||
ignored: _opts.ignored ? arrify(_opts.ignored) : arrify([]),
|
||||
awaitWriteFinish: awf === true ? DEF_AWF : typeof awf === 'object' ? { ...DEF_AWF, ...awf } : false,
|
||||
};
|
||||
// Always default to polling on IBM i because fs.watch() is not available on IBM i.
|
||||
if (isIBMi)
|
||||
opts.usePolling = true;
|
||||
// Editor atomic write normalization enabled by default with fs.watch
|
||||
if (opts.atomic === undefined)
|
||||
opts.atomic = !opts.usePolling;
|
||||
// opts.atomic = typeof _opts.atomic === 'number' ? _opts.atomic : 100;
|
||||
// Global override. Useful for developers, who need to force polling for all
|
||||
// instances of chokidar, regardless of usage / dependency depth
|
||||
const envPoll = process.env.CHOKIDAR_USEPOLLING;
|
||||
if (envPoll !== undefined) {
|
||||
const envLower = envPoll.toLowerCase();
|
||||
if (envLower === 'false' || envLower === '0')
|
||||
opts.usePolling = false;
|
||||
else if (envLower === 'true' || envLower === '1')
|
||||
opts.usePolling = true;
|
||||
else
|
||||
opts.usePolling = !!envLower;
|
||||
}
|
||||
const envInterval = process.env.CHOKIDAR_INTERVAL;
|
||||
if (envInterval)
|
||||
opts.interval = Number.parseInt(envInterval, 10);
|
||||
// This is done to emit ready only once, but each 'add' will increase that?
|
||||
let readyCalls = 0;
|
||||
this._emitReady = () => {
|
||||
readyCalls++;
|
||||
if (readyCalls >= this._readyCount) {
|
||||
this._emitReady = EMPTY_FN;
|
||||
this._readyEmitted = true;
|
||||
// use process.nextTick to allow time for listener to be bound
|
||||
process.nextTick(() => this.emit(EV.READY));
|
||||
}
|
||||
};
|
||||
this._emitRaw = (...args) => this.emit(EV.RAW, ...args);
|
||||
this._boundRemove = this._remove.bind(this);
|
||||
this.options = opts;
|
||||
this._nodeFsHandler = new NodeFsHandler(this);
|
||||
// You’re frozen when your heart’s not open.
|
||||
Object.freeze(opts);
|
||||
}
|
||||
_addIgnoredPath(matcher) {
|
||||
if (isMatcherObject(matcher)) {
|
||||
// return early if we already have a deeply equal matcher object
|
||||
for (const ignored of this._ignoredPaths) {
|
||||
if (isMatcherObject(ignored) &&
|
||||
ignored.path === matcher.path &&
|
||||
ignored.recursive === matcher.recursive) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
this._ignoredPaths.add(matcher);
|
||||
}
|
||||
_removeIgnoredPath(matcher) {
|
||||
this._ignoredPaths.delete(matcher);
|
||||
// now find any matcher objects with the matcher as path
|
||||
if (typeof matcher === 'string') {
|
||||
for (const ignored of this._ignoredPaths) {
|
||||
// TODO (43081j): make this more efficient.
|
||||
// probably just make a `this._ignoredDirectories` or some
|
||||
// such thing.
|
||||
if (isMatcherObject(ignored) && ignored.path === matcher) {
|
||||
this._ignoredPaths.delete(ignored);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Public methods
|
||||
/**
|
||||
* Adds paths to be watched on an existing FSWatcher instance.
|
||||
* @param paths_ file or file list. Other arguments are unused
|
||||
*/
|
||||
add(paths_, _origAdd, _internal) {
|
||||
const { cwd } = this.options;
|
||||
this.closed = false;
|
||||
this._closePromise = undefined;
|
||||
let paths = unifyPaths(paths_);
|
||||
if (cwd) {
|
||||
paths = paths.map((path) => {
|
||||
const absPath = getAbsolutePath(path, cwd);
|
||||
// Check `path` instead of `absPath` because the cwd portion can't be a glob
|
||||
return absPath;
|
||||
});
|
||||
}
|
||||
paths.forEach((path) => {
|
||||
this._removeIgnoredPath(path);
|
||||
});
|
||||
this._userIgnored = undefined;
|
||||
if (!this._readyCount)
|
||||
this._readyCount = 0;
|
||||
this._readyCount += paths.length;
|
||||
Promise.all(paths.map(async (path) => {
|
||||
const res = await this._nodeFsHandler._addToNodeFs(path, !_internal, undefined, 0, _origAdd);
|
||||
if (res)
|
||||
this._emitReady();
|
||||
return res;
|
||||
})).then((results) => {
|
||||
if (this.closed)
|
||||
return;
|
||||
results.forEach((item) => {
|
||||
if (item)
|
||||
this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item));
|
||||
});
|
||||
});
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Close watchers or start ignoring events from specified paths.
|
||||
*/
|
||||
unwatch(paths_) {
|
||||
if (this.closed)
|
||||
return this;
|
||||
const paths = unifyPaths(paths_);
|
||||
const { cwd } = this.options;
|
||||
paths.forEach((path) => {
|
||||
// convert to absolute path unless relative path already matches
|
||||
if (!sysPath.isAbsolute(path) && !this._closers.has(path)) {
|
||||
if (cwd)
|
||||
path = sysPath.join(cwd, path);
|
||||
path = sysPath.resolve(path);
|
||||
}
|
||||
this._closePath(path);
|
||||
this._addIgnoredPath(path);
|
||||
if (this._watched.has(path)) {
|
||||
this._addIgnoredPath({
|
||||
path,
|
||||
recursive: true,
|
||||
});
|
||||
}
|
||||
// reset the cached userIgnored anymatch fn
|
||||
// to make ignoredPaths changes effective
|
||||
this._userIgnored = undefined;
|
||||
});
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Close watchers and remove all listeners from watched paths.
|
||||
*/
|
||||
close() {
|
||||
if (this._closePromise) {
|
||||
return this._closePromise;
|
||||
}
|
||||
this.closed = true;
|
||||
// Memory management.
|
||||
this.removeAllListeners();
|
||||
const closers = [];
|
||||
this._closers.forEach((closerList) => closerList.forEach((closer) => {
|
||||
const promise = closer();
|
||||
if (promise instanceof Promise)
|
||||
closers.push(promise);
|
||||
}));
|
||||
this._streams.forEach((stream) => stream.destroy());
|
||||
this._userIgnored = undefined;
|
||||
this._readyCount = 0;
|
||||
this._readyEmitted = false;
|
||||
this._watched.forEach((dirent) => dirent.dispose());
|
||||
this._closers.clear();
|
||||
this._watched.clear();
|
||||
this._streams.clear();
|
||||
this._symlinkPaths.clear();
|
||||
this._throttled.clear();
|
||||
this._closePromise = closers.length
|
||||
? Promise.all(closers).then(() => undefined)
|
||||
: Promise.resolve();
|
||||
return this._closePromise;
|
||||
}
|
||||
/**
|
||||
* Expose list of watched paths
|
||||
* @returns for chaining
|
||||
*/
|
||||
getWatched() {
|
||||
const watchList = {};
|
||||
this._watched.forEach((entry, dir) => {
|
||||
const key = this.options.cwd ? sysPath.relative(this.options.cwd, dir) : dir;
|
||||
const index = key || ONE_DOT;
|
||||
watchList[index] = entry.getChildren().sort();
|
||||
});
|
||||
return watchList;
|
||||
}
|
||||
emitWithAll(event, args) {
|
||||
this.emit(event, ...args);
|
||||
if (event !== EV.ERROR)
|
||||
this.emit(EV.ALL, event, ...args);
|
||||
}
|
||||
// Common helpers
|
||||
// --------------
|
||||
/**
|
||||
* Normalize and emit events.
|
||||
* Calling _emit DOES NOT MEAN emit() would be called!
|
||||
* @param event Type of event
|
||||
* @param path File or directory path
|
||||
* @param stats arguments to be passed with event
|
||||
* @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
async _emit(event, path, stats) {
|
||||
if (this.closed)
|
||||
return;
|
||||
const opts = this.options;
|
||||
if (isWindows)
|
||||
path = sysPath.normalize(path);
|
||||
if (opts.cwd)
|
||||
path = sysPath.relative(opts.cwd, path);
|
||||
const args = [path];
|
||||
if (stats != null)
|
||||
args.push(stats);
|
||||
const awf = opts.awaitWriteFinish;
|
||||
let pw;
|
||||
if (awf && (pw = this._pendingWrites.get(path))) {
|
||||
pw.lastChange = new Date();
|
||||
return this;
|
||||
}
|
||||
if (opts.atomic) {
|
||||
if (event === EV.UNLINK) {
|
||||
this._pendingUnlinks.set(path, [event, ...args]);
|
||||
setTimeout(() => {
|
||||
this._pendingUnlinks.forEach((entry, path) => {
|
||||
this.emit(...entry);
|
||||
this.emit(EV.ALL, ...entry);
|
||||
this._pendingUnlinks.delete(path);
|
||||
});
|
||||
}, typeof opts.atomic === 'number' ? opts.atomic : 100);
|
||||
return this;
|
||||
}
|
||||
if (event === EV.ADD && this._pendingUnlinks.has(path)) {
|
||||
event = EV.CHANGE;
|
||||
this._pendingUnlinks.delete(path);
|
||||
}
|
||||
}
|
||||
if (awf && (event === EV.ADD || event === EV.CHANGE) && this._readyEmitted) {
|
||||
const awfEmit = (err, stats) => {
|
||||
if (err) {
|
||||
event = EV.ERROR;
|
||||
args[0] = err;
|
||||
this.emitWithAll(event, args);
|
||||
}
|
||||
else if (stats) {
|
||||
// if stats doesn't exist the file must have been deleted
|
||||
if (args.length > 1) {
|
||||
args[1] = stats;
|
||||
}
|
||||
else {
|
||||
args.push(stats);
|
||||
}
|
||||
this.emitWithAll(event, args);
|
||||
}
|
||||
};
|
||||
this._awaitWriteFinish(path, awf.stabilityThreshold, event, awfEmit);
|
||||
return this;
|
||||
}
|
||||
if (event === EV.CHANGE) {
|
||||
const isThrottled = !this._throttle(EV.CHANGE, path, 50);
|
||||
if (isThrottled)
|
||||
return this;
|
||||
}
|
||||
if (opts.alwaysStat &&
|
||||
stats === undefined &&
|
||||
(event === EV.ADD || event === EV.ADD_DIR || event === EV.CHANGE)) {
|
||||
const fullPath = opts.cwd ? sysPath.join(opts.cwd, path) : path;
|
||||
let stats;
|
||||
try {
|
||||
stats = await stat(fullPath);
|
||||
}
|
||||
catch (err) {
|
||||
// do nothing
|
||||
}
|
||||
// Suppress event when fs_stat fails, to avoid sending undefined 'stat'
|
||||
if (!stats || this.closed)
|
||||
return;
|
||||
args.push(stats);
|
||||
}
|
||||
this.emitWithAll(event, args);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Common handler for errors
|
||||
* @returns The error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
_handleError(error) {
|
||||
const code = error && error.code;
|
||||
if (error &&
|
||||
code !== 'ENOENT' &&
|
||||
code !== 'ENOTDIR' &&
|
||||
(!this.options.ignorePermissionErrors || (code !== 'EPERM' && code !== 'EACCES'))) {
|
||||
this.emit(EV.ERROR, error);
|
||||
}
|
||||
return error || this.closed;
|
||||
}
|
||||
/**
|
||||
* Helper utility for throttling
|
||||
* @param actionType type being throttled
|
||||
* @param path being acted upon
|
||||
* @param timeout duration of time to suppress duplicate actions
|
||||
* @returns tracking object or false if action should be suppressed
|
||||
*/
|
||||
_throttle(actionType, path, timeout) {
|
||||
if (!this._throttled.has(actionType)) {
|
||||
this._throttled.set(actionType, new Map());
|
||||
}
|
||||
const action = this._throttled.get(actionType);
|
||||
if (!action)
|
||||
throw new Error('invalid throttle');
|
||||
const actionPath = action.get(path);
|
||||
if (actionPath) {
|
||||
actionPath.count++;
|
||||
return false;
|
||||
}
|
||||
// eslint-disable-next-line prefer-const
|
||||
let timeoutObject;
|
||||
const clear = () => {
|
||||
const item = action.get(path);
|
||||
const count = item ? item.count : 0;
|
||||
action.delete(path);
|
||||
clearTimeout(timeoutObject);
|
||||
if (item)
|
||||
clearTimeout(item.timeoutObject);
|
||||
return count;
|
||||
};
|
||||
timeoutObject = setTimeout(clear, timeout);
|
||||
const thr = { timeoutObject, clear, count: 0 };
|
||||
action.set(path, thr);
|
||||
return thr;
|
||||
}
|
||||
_incrReadyCount() {
|
||||
return this._readyCount++;
|
||||
}
|
||||
/**
|
||||
* Awaits write operation to finish.
|
||||
* Polls a newly created file for size variations. When file size does not change for 'threshold' milliseconds, calls the callback.
|
||||
* @param path being acted upon
|
||||
* @param threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished
|
||||
* @param event
|
||||
* @param awfEmit Callback to be called when ready for event to be emitted.
|
||||
*/
|
||||
_awaitWriteFinish(path, threshold, event, awfEmit) {
|
||||
const awf = this.options.awaitWriteFinish;
|
||||
if (typeof awf !== 'object')
|
||||
return;
|
||||
const pollInterval = awf.pollInterval;
|
||||
let timeoutHandler;
|
||||
let fullPath = path;
|
||||
if (this.options.cwd && !sysPath.isAbsolute(path)) {
|
||||
fullPath = sysPath.join(this.options.cwd, path);
|
||||
}
|
||||
const now = new Date();
|
||||
const writes = this._pendingWrites;
|
||||
function awaitWriteFinishFn(prevStat) {
|
||||
statcb(fullPath, (err, curStat) => {
|
||||
if (err || !writes.has(path)) {
|
||||
if (err && err.code !== 'ENOENT')
|
||||
awfEmit(err);
|
||||
return;
|
||||
}
|
||||
const now = Number(new Date());
|
||||
if (prevStat && curStat.size !== prevStat.size) {
|
||||
writes.get(path).lastChange = now;
|
||||
}
|
||||
const pw = writes.get(path);
|
||||
const df = now - pw.lastChange;
|
||||
if (df >= threshold) {
|
||||
writes.delete(path);
|
||||
awfEmit(undefined, curStat);
|
||||
}
|
||||
else {
|
||||
timeoutHandler = setTimeout(awaitWriteFinishFn, pollInterval, curStat);
|
||||
}
|
||||
});
|
||||
}
|
||||
if (!writes.has(path)) {
|
||||
writes.set(path, {
|
||||
lastChange: now,
|
||||
cancelWait: () => {
|
||||
writes.delete(path);
|
||||
clearTimeout(timeoutHandler);
|
||||
return event;
|
||||
},
|
||||
});
|
||||
timeoutHandler = setTimeout(awaitWriteFinishFn, pollInterval);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Determines whether user has asked to ignore this path.
|
||||
*/
|
||||
_isIgnored(path, stats) {
|
||||
if (this.options.atomic && DOT_RE.test(path))
|
||||
return true;
|
||||
if (!this._userIgnored) {
|
||||
const { cwd } = this.options;
|
||||
const ign = this.options.ignored;
|
||||
const ignored = (ign || []).map(normalizeIgnored(cwd));
|
||||
const ignoredPaths = [...this._ignoredPaths];
|
||||
const list = [...ignoredPaths.map(normalizeIgnored(cwd)), ...ignored];
|
||||
this._userIgnored = anymatch(list, undefined);
|
||||
}
|
||||
return this._userIgnored(path, stats);
|
||||
}
|
||||
_isntIgnored(path, stat) {
|
||||
return !this._isIgnored(path, stat);
|
||||
}
|
||||
/**
|
||||
* Provides a set of common helpers and properties relating to symlink handling.
|
||||
* @param path file or directory pattern being watched
|
||||
*/
|
||||
_getWatchHelpers(path) {
|
||||
return new WatchHelper(path, this.options.followSymlinks, this);
|
||||
}
|
||||
// Directory helpers
|
||||
// -----------------
|
||||
/**
|
||||
* Provides directory tracking objects
|
||||
* @param directory path of the directory
|
||||
*/
|
||||
_getWatchedDir(directory) {
|
||||
const dir = sysPath.resolve(directory);
|
||||
if (!this._watched.has(dir))
|
||||
this._watched.set(dir, new DirEntry(dir, this._boundRemove));
|
||||
return this._watched.get(dir);
|
||||
}
|
||||
// File helpers
|
||||
// ------------
|
||||
/**
|
||||
* Check for read permissions: https://stackoverflow.com/a/11781404/1358405
|
||||
*/
|
||||
_hasReadPermissions(stats) {
|
||||
if (this.options.ignorePermissionErrors)
|
||||
return true;
|
||||
return Boolean(Number(stats.mode) & 0o400);
|
||||
}
|
||||
/**
|
||||
* Handles emitting unlink events for
|
||||
* files and directories, and via recursion, for
|
||||
* files and directories within directories that are unlinked
|
||||
* @param directory within which the following item is located
|
||||
* @param item base path of item/directory
|
||||
*/
|
||||
_remove(directory, item, isDirectory) {
|
||||
// if what is being deleted is a directory, get that directory's paths
|
||||
// for recursive deleting and cleaning of watched object
|
||||
// if it is not a directory, nestedDirectoryChildren will be empty array
|
||||
const path = sysPath.join(directory, item);
|
||||
const fullPath = sysPath.resolve(path);
|
||||
isDirectory =
|
||||
isDirectory != null ? isDirectory : this._watched.has(path) || this._watched.has(fullPath);
|
||||
// prevent duplicate handling in case of arriving here nearly simultaneously
|
||||
// via multiple paths (such as _handleFile and _handleDir)
|
||||
if (!this._throttle('remove', path, 100))
|
||||
return;
|
||||
// if the only watched file is removed, watch for its return
|
||||
if (!isDirectory && this._watched.size === 1) {
|
||||
this.add(directory, item, true);
|
||||
}
|
||||
// This will create a new entry in the watched object in either case
|
||||
// so we got to do the directory check beforehand
|
||||
const wp = this._getWatchedDir(path);
|
||||
const nestedDirectoryChildren = wp.getChildren();
|
||||
// Recursively remove children directories / files.
|
||||
nestedDirectoryChildren.forEach((nested) => this._remove(path, nested));
|
||||
// Check if item was on the watched list and remove it
|
||||
const parent = this._getWatchedDir(directory);
|
||||
const wasTracked = parent.has(item);
|
||||
parent.remove(item);
|
||||
// Fixes issue #1042 -> Relative paths were detected and added as symlinks
|
||||
// (https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L612),
|
||||
// but never removed from the map in case the path was deleted.
|
||||
// This leads to an incorrect state if the path was recreated:
|
||||
// https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L553
|
||||
if (this._symlinkPaths.has(fullPath)) {
|
||||
this._symlinkPaths.delete(fullPath);
|
||||
}
|
||||
// If we wait for this file to be fully written, cancel the wait.
|
||||
let relPath = path;
|
||||
if (this.options.cwd)
|
||||
relPath = sysPath.relative(this.options.cwd, path);
|
||||
if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) {
|
||||
const event = this._pendingWrites.get(relPath).cancelWait();
|
||||
if (event === EV.ADD)
|
||||
return;
|
||||
}
|
||||
// The Entry will either be a directory that just got removed
|
||||
// or a bogus entry to a file, in either case we have to remove it
|
||||
this._watched.delete(path);
|
||||
this._watched.delete(fullPath);
|
||||
const eventName = isDirectory ? EV.UNLINK_DIR : EV.UNLINK;
|
||||
if (wasTracked && !this._isIgnored(path))
|
||||
this._emit(eventName, path);
|
||||
// Avoid conflicts if we later create another file with the same name
|
||||
this._closePath(path);
|
||||
}
|
||||
/**
|
||||
* Closes all watchers for a path
|
||||
*/
|
||||
_closePath(path) {
|
||||
this._closeFile(path);
|
||||
const dir = sysPath.dirname(path);
|
||||
this._getWatchedDir(dir).remove(sysPath.basename(path));
|
||||
}
|
||||
/**
|
||||
* Closes only file-specific watchers
|
||||
*/
|
||||
_closeFile(path) {
|
||||
const closers = this._closers.get(path);
|
||||
if (!closers)
|
||||
return;
|
||||
closers.forEach((closer) => closer());
|
||||
this._closers.delete(path);
|
||||
}
|
||||
_addPathCloser(path, closer) {
|
||||
if (!closer)
|
||||
return;
|
||||
let list = this._closers.get(path);
|
||||
if (!list) {
|
||||
list = [];
|
||||
this._closers.set(path, list);
|
||||
}
|
||||
list.push(closer);
|
||||
}
|
||||
_readdirp(root, opts) {
|
||||
if (this.closed)
|
||||
return;
|
||||
const options = { type: EV.ALL, alwaysStat: true, lstat: true, ...opts, depth: 0 };
|
||||
let stream = readdirp(root, options);
|
||||
this._streams.add(stream);
|
||||
stream.once(STR_CLOSE, () => {
|
||||
stream = undefined;
|
||||
});
|
||||
stream.once(STR_END, () => {
|
||||
if (stream) {
|
||||
this._streams.delete(stream);
|
||||
stream = undefined;
|
||||
}
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Instantiates watcher with paths to be tracked.
|
||||
* @param paths file / directory paths
|
||||
* @param options opts, such as `atomic`, `awaitWriteFinish`, `ignored`, and others
|
||||
* @returns an instance of FSWatcher for chaining.
|
||||
* @example
|
||||
* const watcher = watch('.').on('all', (event, path) => { console.log(event, path); });
|
||||
* watch('.', { atomic: true, awaitWriteFinish: true, ignored: (f, stats) => stats?.isFile() && !f.endsWith('.js') })
|
||||
*/
|
||||
export function watch(paths, options = {}) {
|
||||
const watcher = new FSWatcher(options);
|
||||
watcher.add(paths);
|
||||
return watcher;
|
||||
}
|
||||
export default { watch, FSWatcher };
|
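createPattern() above accepts four matcher shapes for the ignored option: a predicate function, an exact-match string, a RegExp, and a { path, recursive } object. The sketch below shows all four, assuming an ESM import of the package; every path in it is made up for illustration.

// sketch: the four matcher shapes handled by createPattern() above
import { watch } from 'chokidar';

const watcher = watch('.', {
  ignored: [
    'secrets.txt',                                              // string: exact match against the tested path
    /\.log$/,                                                   // RegExp: tested against the path
    (path, stats) => stats?.isFile() && path.endsWith('.tmp'),  // function: receives path and (maybe) stats
    { path: 'node_modules', recursive: true },                  // object: ignore a whole subtree
  ],
});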
1 node_modules/chokidar/esm/package.json generated vendored Normal file
@@ -0,0 +1 @@
{ "type": "module", "sideEffects": false }
90 node_modules/chokidar/handler.d.ts generated vendored Normal file
@@ -0,0 +1,90 @@
import type { WatchEventType, Stats, FSWatcher as NativeFsWatcher } from 'fs';
import type { FSWatcher, WatchHelper, Throttler } from './index.js';
import type { EntryInfo } from 'readdirp';
export type Path = string;
export declare const STR_DATA = "data";
export declare const STR_END = "end";
export declare const STR_CLOSE = "close";
export declare const EMPTY_FN: () => void;
export declare const IDENTITY_FN: (val: unknown) => unknown;
export declare const isWindows: boolean;
export declare const isMacos: boolean;
export declare const isLinux: boolean;
export declare const isFreeBSD: boolean;
export declare const isIBMi: boolean;
export declare const EVENTS: {
readonly ALL: "all";
readonly READY: "ready";
readonly ADD: "add";
readonly CHANGE: "change";
readonly ADD_DIR: "addDir";
readonly UNLINK: "unlink";
readonly UNLINK_DIR: "unlinkDir";
readonly RAW: "raw";
readonly ERROR: "error";
};
export type EventName = (typeof EVENTS)[keyof typeof EVENTS];
export type FsWatchContainer = {
listeners: (path: string) => void | Set<any>;
errHandlers: (err: unknown) => void | Set<any>;
rawEmitters: (ev: WatchEventType, path: string, opts: unknown) => void | Set<any>;
watcher: NativeFsWatcher;
watcherUnusable?: boolean;
};
export interface WatchHandlers {
listener: (path: string) => void;
errHandler: (err: unknown) => void;
rawEmitter: (ev: WatchEventType, path: string, opts: unknown) => void;
}
/**
* @mixin
*/
export declare class NodeFsHandler {
fsw: FSWatcher;
_boundHandleError: (error: unknown) => void;
constructor(fsW: FSWatcher);
/**
* Watch file for changes with fs_watchFile or fs_watch.
* @param path to file or dir
* @param listener on fs change
* @returns closer for the watcher instance
*/
_watchWithNodeFs(path: string, listener: (path: string, newStats?: any) => void | Promise<void>): (() => void) | undefined;
/**
* Watch a file and emit add event if warranted.
* @returns closer for the watcher instance
*/
_handleFile(file: Path, stats: Stats, initialAdd: boolean): (() => void) | undefined;
/**
* Handle symlinks encountered while reading a dir.
* @param entry returned by readdirp
* @param directory path of dir being read
* @param path of this item
* @param item basename of this item
* @returns true if no more processing is needed for this entry.
*/
_handleSymlink(entry: EntryInfo, directory: string, path: Path, item: string): Promise<boolean | undefined>;
_handleRead(directory: string, initialAdd: boolean, wh: WatchHelper, target: Path, dir: Path, depth: number, throttler: Throttler): Promise<unknown> | undefined;
/**
* Read directory to add / remove files from `@watched` list and re-read it on change.
* @param dir fs path
* @param stats
* @param initialAdd
* @param depth relative to user-supplied path
* @param target child path targeted for watch
* @param wh Common watch helpers for this path
* @param realpath
* @returns closer for the watcher instance.
*/
_handleDir(dir: string, stats: Stats, initialAdd: boolean, depth: number, target: string, wh: WatchHelper, realpath: string): Promise<(() => void) | undefined>;
/**
* Handle added file, directory, or glob pattern.
* Delegates call to _handleFile / _handleDir after checks.
* @param path to file or dir
* @param initialAdd was the file added at watch instantiation?
* @param priorWh depth relative to user-supplied path
* @param depth Child path actually targeted for watch
* @param target Child path actually targeted for watch
*/
_addToNodeFs(path: string, initialAdd: boolean, priorWh: WatchHelper | undefined, depth: number, target?: string): Promise<string | false | undefined>;
}
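handler.d.ts pins down the literal event-name strings in the EVENTS table. The sketch below observes them through a single 'all' listener, which per emitWithAll() in index.js carries every event except 'error' (and 'ready', which is emitted separately); the counting logic is purely illustrative.

// sketch: the event names from the EVENTS table above, observed via the 'all' event
import { watch } from 'chokidar';

const names = ['add', 'addDir', 'change', 'unlink', 'unlinkDir']; // per EVENTS above
const counts = Object.fromEntries(names.map((n) => [n, 0]));

watch('.', { ignoreInitial: true })
  .on('all', (event, path) => {
    if (event in counts) counts[event] += 1; // 'all' never carries 'error' or 'ready'
    console.log(event, path, counts);
  })
  .on('error', (err) => console.error('watcher error:', err));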
635 node_modules/chokidar/handler.js generated vendored Normal file
@@ -0,0 +1,635 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.NodeFsHandler = exports.EVENTS = exports.isIBMi = exports.isFreeBSD = exports.isLinux = exports.isMacos = exports.isWindows = exports.IDENTITY_FN = exports.EMPTY_FN = exports.STR_CLOSE = exports.STR_END = exports.STR_DATA = void 0;
|
||||
const fs_1 = require("fs");
|
||||
const promises_1 = require("fs/promises");
|
||||
const sysPath = require("path");
|
||||
const os_1 = require("os");
|
||||
exports.STR_DATA = 'data';
|
||||
exports.STR_END = 'end';
|
||||
exports.STR_CLOSE = 'close';
|
||||
const EMPTY_FN = () => { };
|
||||
exports.EMPTY_FN = EMPTY_FN;
|
||||
const IDENTITY_FN = (val) => val;
|
||||
exports.IDENTITY_FN = IDENTITY_FN;
|
||||
const pl = process.platform;
|
||||
exports.isWindows = pl === 'win32';
|
||||
exports.isMacos = pl === 'darwin';
|
||||
exports.isLinux = pl === 'linux';
|
||||
exports.isFreeBSD = pl === 'freebsd';
|
||||
exports.isIBMi = (0, os_1.type)() === 'OS400';
|
||||
exports.EVENTS = {
|
||||
ALL: 'all',
|
||||
READY: 'ready',
|
||||
ADD: 'add',
|
||||
CHANGE: 'change',
|
||||
ADD_DIR: 'addDir',
|
||||
UNLINK: 'unlink',
|
||||
UNLINK_DIR: 'unlinkDir',
|
||||
RAW: 'raw',
|
||||
ERROR: 'error',
|
||||
};
|
||||
const EV = exports.EVENTS;
|
||||
const THROTTLE_MODE_WATCH = 'watch';
|
||||
const statMethods = { lstat: promises_1.lstat, stat: promises_1.stat };
|
||||
const KEY_LISTENERS = 'listeners';
|
||||
const KEY_ERR = 'errHandlers';
|
||||
const KEY_RAW = 'rawEmitters';
|
||||
const HANDLER_KEYS = [KEY_LISTENERS, KEY_ERR, KEY_RAW];
|
||||
// prettier-ignore
|
||||
const binaryExtensions = new Set([
|
||||
'3dm', '3ds', '3g2', '3gp', '7z', 'a', 'aac', 'adp', 'afdesign', 'afphoto', 'afpub', 'ai',
|
||||
'aif', 'aiff', 'alz', 'ape', 'apk', 'appimage', 'ar', 'arj', 'asf', 'au', 'avi',
|
||||
'bak', 'baml', 'bh', 'bin', 'bk', 'bmp', 'btif', 'bz2', 'bzip2',
|
||||
'cab', 'caf', 'cgm', 'class', 'cmx', 'cpio', 'cr2', 'cur', 'dat', 'dcm', 'deb', 'dex', 'djvu',
|
||||
'dll', 'dmg', 'dng', 'doc', 'docm', 'docx', 'dot', 'dotm', 'dra', 'DS_Store', 'dsk', 'dts',
|
||||
'dtshd', 'dvb', 'dwg', 'dxf',
|
||||
'ecelp4800', 'ecelp7470', 'ecelp9600', 'egg', 'eol', 'eot', 'epub', 'exe',
|
||||
'f4v', 'fbs', 'fh', 'fla', 'flac', 'flatpak', 'fli', 'flv', 'fpx', 'fst', 'fvt',
|
||||
'g3', 'gh', 'gif', 'graffle', 'gz', 'gzip',
|
||||
'h261', 'h263', 'h264', 'icns', 'ico', 'ief', 'img', 'ipa', 'iso',
|
||||
'jar', 'jpeg', 'jpg', 'jpgv', 'jpm', 'jxr', 'key', 'ktx',
|
||||
'lha', 'lib', 'lvp', 'lz', 'lzh', 'lzma', 'lzo',
|
||||
'm3u', 'm4a', 'm4v', 'mar', 'mdi', 'mht', 'mid', 'midi', 'mj2', 'mka', 'mkv', 'mmr', 'mng',
|
||||
'mobi', 'mov', 'movie', 'mp3',
|
||||
'mp4', 'mp4a', 'mpeg', 'mpg', 'mpga', 'mxu',
|
||||
'nef', 'npx', 'numbers', 'nupkg',
|
||||
'o', 'odp', 'ods', 'odt', 'oga', 'ogg', 'ogv', 'otf', 'ott',
|
||||
'pages', 'pbm', 'pcx', 'pdb', 'pdf', 'pea', 'pgm', 'pic', 'png', 'pnm', 'pot', 'potm',
|
||||
'potx', 'ppa', 'ppam',
|
||||
'ppm', 'pps', 'ppsm', 'ppsx', 'ppt', 'pptm', 'pptx', 'psd', 'pya', 'pyc', 'pyo', 'pyv',
|
||||
'qt',
|
||||
'rar', 'ras', 'raw', 'resources', 'rgb', 'rip', 'rlc', 'rmf', 'rmvb', 'rpm', 'rtf', 'rz',
|
||||
's3m', 's7z', 'scpt', 'sgi', 'shar', 'snap', 'sil', 'sketch', 'slk', 'smv', 'snk', 'so',
|
||||
'stl', 'suo', 'sub', 'swf',
|
||||
'tar', 'tbz', 'tbz2', 'tga', 'tgz', 'thmx', 'tif', 'tiff', 'tlz', 'ttc', 'ttf', 'txz',
|
||||
'udf', 'uvh', 'uvi', 'uvm', 'uvp', 'uvs', 'uvu',
|
||||
'viv', 'vob',
|
||||
'war', 'wav', 'wax', 'wbmp', 'wdp', 'weba', 'webm', 'webp', 'whl', 'wim', 'wm', 'wma',
|
||||
'wmv', 'wmx', 'woff', 'woff2', 'wrm', 'wvx',
|
||||
'xbm', 'xif', 'xla', 'xlam', 'xls', 'xlsb', 'xlsm', 'xlsx', 'xlt', 'xltm', 'xltx', 'xm',
|
||||
'xmind', 'xpi', 'xpm', 'xwd', 'xz',
|
||||
'z', 'zip', 'zipx',
|
||||
]);
|
||||
const isBinaryPath = (filePath) => binaryExtensions.has(sysPath.extname(filePath).slice(1).toLowerCase());
|
||||
// TODO: emit errors properly. Example: EMFILE on Macos.
|
||||
const foreach = (val, fn) => {
|
||||
if (val instanceof Set) {
|
||||
val.forEach(fn);
|
||||
}
|
||||
else {
|
||||
fn(val);
|
||||
}
|
||||
};
|
||||
const addAndConvert = (main, prop, item) => {
|
||||
let container = main[prop];
|
||||
if (!(container instanceof Set)) {
|
||||
main[prop] = container = new Set([container]);
|
||||
}
|
||||
container.add(item);
|
||||
};
|
||||
const clearItem = (cont) => (key) => {
|
||||
const set = cont[key];
|
||||
if (set instanceof Set) {
|
||||
set.clear();
|
||||
}
|
||||
else {
|
||||
delete cont[key];
|
||||
}
|
||||
};
|
||||
const delFromSet = (main, prop, item) => {
|
||||
const container = main[prop];
|
||||
if (container instanceof Set) {
|
||||
container.delete(item);
|
||||
}
|
||||
else if (container === item) {
|
||||
delete main[prop];
|
||||
}
|
||||
};
|
||||
const isEmptySet = (val) => (val instanceof Set ? val.size === 0 : !val);
|
||||
const FsWatchInstances = new Map();
|
||||
/**
|
||||
* Instantiates the fs_watch interface
|
||||
* @param path to be watched
|
||||
* @param options to be passed to fs_watch
|
||||
* @param listener main event handler
|
||||
* @param errHandler emits info about errors
|
||||
* @param emitRaw emits raw event data
|
||||
* @returns {NativeFsWatcher}
|
||||
*/
|
||||
function createFsWatchInstance(path, options, listener, errHandler, emitRaw) {
|
||||
const handleEvent = (rawEvent, evPath) => {
|
||||
listener(path);
|
||||
emitRaw(rawEvent, evPath, { watchedPath: path });
|
||||
// emit based on events occurring for files from a directory's watcher in
|
||||
// case the file's watcher misses it (and rely on throttling to de-dupe)
|
||||
if (evPath && path !== evPath) {
|
||||
fsWatchBroadcast(sysPath.resolve(path, evPath), KEY_LISTENERS, sysPath.join(path, evPath));
|
||||
}
|
||||
};
|
||||
try {
|
||||
return (0, fs_1.watch)(path, {
|
||||
persistent: options.persistent,
|
||||
}, handleEvent);
|
||||
}
|
||||
catch (error) {
|
||||
errHandler(error);
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Helper for passing fs_watch event data to a collection of listeners
|
||||
* @param fullPath absolute path bound to fs_watch instance
|
||||
*/
|
||||
const fsWatchBroadcast = (fullPath, listenerType, val1, val2, val3) => {
|
||||
const cont = FsWatchInstances.get(fullPath);
|
||||
if (!cont)
|
||||
return;
|
||||
foreach(cont[listenerType], (listener) => {
|
||||
listener(val1, val2, val3);
|
||||
});
|
||||
};
|
||||
/**
|
||||
* Instantiates the fs_watch interface or binds listeners
|
||||
* to an existing one covering the same file system entry
|
||||
* @param path
|
||||
* @param fullPath absolute path
|
||||
* @param options to be passed to fs_watch
|
||||
* @param handlers container for event listener functions
|
||||
*/
|
||||
const setFsWatchListener = (path, fullPath, options, handlers) => {
|
||||
const { listener, errHandler, rawEmitter } = handlers;
|
||||
let cont = FsWatchInstances.get(fullPath);
|
||||
let watcher;
|
||||
if (!options.persistent) {
|
||||
watcher = createFsWatchInstance(path, options, listener, errHandler, rawEmitter);
|
||||
if (!watcher)
|
||||
return;
|
||||
return watcher.close.bind(watcher);
|
||||
}
|
||||
if (cont) {
|
||||
addAndConvert(cont, KEY_LISTENERS, listener);
|
||||
addAndConvert(cont, KEY_ERR, errHandler);
|
||||
addAndConvert(cont, KEY_RAW, rawEmitter);
|
||||
}
|
||||
else {
|
||||
watcher = createFsWatchInstance(path, options, fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS), errHandler, // no need to use broadcast here
|
||||
fsWatchBroadcast.bind(null, fullPath, KEY_RAW));
|
||||
if (!watcher)
|
||||
return;
|
||||
watcher.on(EV.ERROR, async (error) => {
|
||||
const broadcastErr = fsWatchBroadcast.bind(null, fullPath, KEY_ERR);
|
||||
if (cont)
|
||||
cont.watcherUnusable = true; // documented since Node 10.4.1
|
||||
// Workaround for https://github.com/joyent/node/issues/4337
|
||||
if (exports.isWindows && error.code === 'EPERM') {
|
||||
try {
|
||||
const fd = await (0, promises_1.open)(path, 'r');
|
||||
await fd.close();
|
||||
broadcastErr(error);
|
||||
}
|
||||
catch (err) {
|
||||
// do nothing
|
||||
}
|
||||
}
|
||||
else {
|
||||
broadcastErr(error);
|
||||
}
|
||||
});
|
||||
cont = {
|
||||
listeners: listener,
|
||||
errHandlers: errHandler,
|
||||
rawEmitters: rawEmitter,
|
||||
watcher,
|
||||
};
|
||||
FsWatchInstances.set(fullPath, cont);
|
||||
}
|
||||
// const index = cont.listeners.indexOf(listener);
|
||||
// removes this instance's listeners and closes the underlying fs_watch
|
||||
// instance if there are no more listeners left
|
||||
return () => {
|
||||
delFromSet(cont, KEY_LISTENERS, listener);
|
||||
delFromSet(cont, KEY_ERR, errHandler);
|
||||
delFromSet(cont, KEY_RAW, rawEmitter);
|
||||
if (isEmptySet(cont.listeners)) {
|
||||
// Check to protect against issue gh-730.
|
||||
// if (cont.watcherUnusable) {
|
||||
cont.watcher.close();
|
||||
// }
|
||||
FsWatchInstances.delete(fullPath);
|
||||
HANDLER_KEYS.forEach(clearItem(cont));
|
||||
// @ts-ignore
|
||||
cont.watcher = undefined;
|
||||
Object.freeze(cont);
|
||||
}
|
||||
};
|
||||
};
|
||||
// fs_watchFile helpers
|
||||
// object to hold per-process fs_watchFile instances
|
||||
// (may be shared across chokidar FSWatcher instances)
|
||||
const FsWatchFileInstances = new Map();
|
||||
/**
|
||||
* Instantiates the fs_watchFile interface or binds listeners
|
||||
* to an existing one covering the same file system entry
|
||||
* @param path to be watched
|
||||
* @param fullPath absolute path
|
||||
* @param options options to be passed to fs_watchFile
|
||||
* @param handlers container for event listener functions
|
||||
* @returns closer
|
||||
*/
|
||||
const setFsWatchFileListener = (path, fullPath, options, handlers) => {
|
||||
const { listener, rawEmitter } = handlers;
|
||||
let cont = FsWatchFileInstances.get(fullPath);
|
||||
// let listeners = new Set();
|
||||
// let rawEmitters = new Set();
|
||||
const copts = cont && cont.options;
|
||||
if (copts && (copts.persistent < options.persistent || copts.interval > options.interval)) {
|
||||
// "Upgrade" the watcher to persistence or a quicker interval.
|
||||
// This creates some unlikely edge case issues if the user mixes
|
||||
// settings in a very weird way, but solving for those cases
|
||||
// doesn't seem worthwhile for the added complexity.
|
||||
// listeners = cont.listeners;
|
||||
// rawEmitters = cont.rawEmitters;
|
||||
(0, fs_1.unwatchFile)(fullPath);
|
||||
cont = undefined;
|
||||
}
|
||||
if (cont) {
|
||||
addAndConvert(cont, KEY_LISTENERS, listener);
|
||||
addAndConvert(cont, KEY_RAW, rawEmitter);
|
||||
}
|
||||
else {
|
||||
// TODO
|
||||
// listeners.add(listener);
|
||||
// rawEmitters.add(rawEmitter);
|
||||
cont = {
|
||||
listeners: listener,
|
||||
rawEmitters: rawEmitter,
|
||||
options,
|
||||
watcher: (0, fs_1.watchFile)(fullPath, options, (curr, prev) => {
|
||||
foreach(cont.rawEmitters, (rawEmitter) => {
|
||||
rawEmitter(EV.CHANGE, fullPath, { curr, prev });
|
||||
});
|
||||
const currmtime = curr.mtimeMs;
|
||||
if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) {
|
||||
foreach(cont.listeners, (listener) => listener(path, curr));
|
||||
}
|
||||
}),
|
||||
};
|
||||
FsWatchFileInstances.set(fullPath, cont);
|
||||
}
|
||||
// const index = cont.listeners.indexOf(listener);
|
||||
// Removes this instance's listeners and closes the underlying fs_watchFile
|
||||
// instance if there are no more listeners left.
|
||||
return () => {
|
||||
delFromSet(cont, KEY_LISTENERS, listener);
|
||||
delFromSet(cont, KEY_RAW, rawEmitter);
|
||||
if (isEmptySet(cont.listeners)) {
|
||||
FsWatchFileInstances.delete(fullPath);
|
||||
(0, fs_1.unwatchFile)(fullPath);
|
||||
cont.options = cont.watcher = undefined;
|
||||
Object.freeze(cont);
|
||||
}
|
||||
};
|
||||
};
|
||||
/**
|
||||
* @mixin
|
||||
*/
|
||||
class NodeFsHandler {
|
||||
constructor(fsW) {
|
||||
this.fsw = fsW;
|
||||
this._boundHandleError = (error) => fsW._handleError(error);
|
||||
}
|
||||
/**
|
||||
* Watch file for changes with fs_watchFile or fs_watch.
|
||||
* @param path to file or dir
|
||||
* @param listener on fs change
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_watchWithNodeFs(path, listener) {
|
||||
const opts = this.fsw.options;
|
||||
const directory = sysPath.dirname(path);
|
||||
const basename = sysPath.basename(path);
|
||||
const parent = this.fsw._getWatchedDir(directory);
|
||||
parent.add(basename);
|
||||
const absolutePath = sysPath.resolve(path);
|
||||
const options = {
|
||||
persistent: opts.persistent,
|
||||
};
|
||||
if (!listener)
|
||||
listener = exports.EMPTY_FN;
|
||||
let closer;
|
||||
if (opts.usePolling) {
|
||||
const enableBin = opts.interval !== opts.binaryInterval;
|
||||
options.interval = enableBin && isBinaryPath(basename) ? opts.binaryInterval : opts.interval;
|
||||
closer = setFsWatchFileListener(path, absolutePath, options, {
|
||||
listener,
|
||||
rawEmitter: this.fsw._emitRaw,
|
||||
});
|
||||
}
|
||||
else {
|
||||
closer = setFsWatchListener(path, absolutePath, options, {
|
||||
listener,
|
||||
errHandler: this._boundHandleError,
|
||||
rawEmitter: this.fsw._emitRaw,
|
||||
});
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
/**
|
||||
* Watch a file and emit add event if warranted.
|
||||
* @returns closer for the watcher instance
|
||||
*/
|
||||
_handleFile(file, stats, initialAdd) {
|
||||
if (this.fsw.closed) {
|
||||
return;
|
||||
}
|
||||
const dirname = sysPath.dirname(file);
|
||||
const basename = sysPath.basename(file);
|
||||
const parent = this.fsw._getWatchedDir(dirname);
|
||||
// stats is always present
|
||||
let prevStats = stats;
|
||||
// if the file is already being watched, do nothing
|
||||
if (parent.has(basename))
|
||||
return;
|
||||
const listener = async (path, newStats) => {
|
||||
if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5))
|
||||
return;
|
||||
if (!newStats || newStats.mtimeMs === 0) {
|
||||
try {
|
||||
const newStats = await (0, promises_1.stat)(file);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
// Check that change event was not fired because of changed only accessTime.
|
||||
const at = newStats.atimeMs;
|
||||
const mt = newStats.mtimeMs;
|
||||
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
|
||||
this.fsw._emit(EV.CHANGE, file, newStats);
|
||||
}
|
||||
if ((exports.isMacos || exports.isLinux || exports.isFreeBSD) && prevStats.ino !== newStats.ino) {
|
||||
this.fsw._closeFile(path);
|
||||
prevStats = newStats;
|
||||
const closer = this._watchWithNodeFs(file, listener);
|
||||
if (closer)
|
||||
this.fsw._addPathCloser(path, closer);
|
||||
}
|
||||
else {
|
||||
prevStats = newStats;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
// Fix issues where mtime is null but file is still present
|
||||
this.fsw._remove(dirname, basename);
|
||||
}
|
||||
// add is about to be emitted if file not already tracked in parent
|
||||
}
|
||||
else if (parent.has(basename)) {
|
||||
// Check that change event was not fired because of changed only accessTime.
|
||||
const at = newStats.atimeMs;
|
||||
const mt = newStats.mtimeMs;
|
||||
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
|
||||
this.fsw._emit(EV.CHANGE, file, newStats);
|
||||
}
|
||||
prevStats = newStats;
|
||||
}
|
||||
};
|
||||
// kick off the watcher
|
||||
const closer = this._watchWithNodeFs(file, listener);
|
||||
// emit an add event if we're supposed to
|
||||
if (!(initialAdd && this.fsw.options.ignoreInitial) && this.fsw._isntIgnored(file)) {
|
||||
if (!this.fsw._throttle(EV.ADD, file, 0))
|
||||
return;
|
||||
this.fsw._emit(EV.ADD, file, stats);
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
/**
|
||||
* Handle symlinks encountered while reading a dir.
|
||||
* @param entry returned by readdirp
|
||||
* @param directory path of dir being read
|
||||
* @param path of this item
|
||||
* @param item basename of this item
|
||||
* @returns true if no more processing is needed for this entry.
|
||||
*/
|
||||
async _handleSymlink(entry, directory, path, item) {
|
||||
if (this.fsw.closed) {
|
||||
return;
|
||||
}
|
||||
const full = entry.fullPath;
|
||||
const dir = this.fsw._getWatchedDir(directory);
|
||||
if (!this.fsw.options.followSymlinks) {
|
||||
// watch symlink directly (don't follow) and detect changes
|
||||
this.fsw._incrReadyCount();
|
||||
let linkPath;
|
||||
try {
|
||||
linkPath = await (0, promises_1.realpath)(path);
|
||||
}
|
||||
catch (e) {
|
||||
this.fsw._emitReady();
|
||||
return true;
|
||||
}
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
if (dir.has(item)) {
|
||||
if (this.fsw._symlinkPaths.get(full) !== linkPath) {
|
||||
this.fsw._symlinkPaths.set(full, linkPath);
|
||||
this.fsw._emit(EV.CHANGE, path, entry.stats);
|
||||
}
|
||||
}
|
||||
else {
|
||||
dir.add(item);
|
||||
this.fsw._symlinkPaths.set(full, linkPath);
|
||||
this.fsw._emit(EV.ADD, path, entry.stats);
|
||||
}
|
||||
this.fsw._emitReady();
|
||||
return true;
|
||||
}
|
||||
// don't follow the same symlink more than once
|
||||
if (this.fsw._symlinkPaths.has(full)) {
|
||||
return true;
|
||||
}
|
||||
this.fsw._symlinkPaths.set(full, true);
|
||||
}
|
||||
_handleRead(directory, initialAdd, wh, target, dir, depth, throttler) {
|
||||
// Normalize the directory name on Windows
|
||||
directory = sysPath.join(directory, '');
|
||||
throttler = this.fsw._throttle('readdir', directory, 1000);
|
||||
if (!throttler)
|
||||
return;
|
||||
const previous = this.fsw._getWatchedDir(wh.path);
|
||||
const current = new Set();
|
||||
let stream = this.fsw._readdirp(directory, {
|
||||
fileFilter: (entry) => wh.filterPath(entry),
|
||||
directoryFilter: (entry) => wh.filterDir(entry),
|
||||
});
|
||||
if (!stream)
|
||||
return;
|
||||
stream
|
||||
.on(exports.STR_DATA, async (entry) => {
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
const item = entry.path;
|
||||
let path = sysPath.join(directory, item);
|
||||
current.add(item);
|
||||
if (entry.stats.isSymbolicLink() &&
|
||||
(await this._handleSymlink(entry, directory, path, item))) {
|
||||
return;
|
||||
}
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
// Files that are present in the current directory snapshot
|
||||
// but absent in previous are added to watch list and
|
||||
// emit `add` event.
|
||||
if (item === target || (!target && !previous.has(item))) {
|
||||
this.fsw._incrReadyCount();
|
||||
// ensure relativeness of path is preserved in case of watcher reuse
|
||||
path = sysPath.join(dir, sysPath.relative(dir, path));
|
||||
this._addToNodeFs(path, initialAdd, wh, depth + 1);
|
||||
}
|
||||
})
|
||||
.on(EV.ERROR, this._boundHandleError);
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!stream)
|
||||
return reject();
|
||||
stream.once(exports.STR_END, () => {
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
const wasThrottled = throttler ? throttler.clear() : false;
|
||||
resolve(undefined);
|
||||
// Files that are absent in the current directory snapshot
|
||||
// but present in the previous one emit a `remove` event
|
||||
// and are removed from @watched[directory].
|
||||
previous
|
||||
.getChildren()
|
||||
.filter((item) => {
|
||||
return item !== directory && !current.has(item);
|
||||
})
|
||||
.forEach((item) => {
|
||||
this.fsw._remove(directory, item);
|
||||
});
|
||||
stream = undefined;
|
||||
// one more time for any missed in case changes came in extremely quickly
|
||||
if (wasThrottled)
|
||||
this._handleRead(directory, false, wh, target, dir, depth, throttler);
|
||||
});
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Read directory to add / remove files from `@watched` list and re-read it on change.
|
||||
* @param dir fs path
|
||||
* @param stats
|
||||
* @param initialAdd
|
||||
* @param depth relative to user-supplied path
|
||||
* @param target child path targeted for watch
|
||||
* @param wh Common watch helpers for this path
|
||||
* @param realpath
|
||||
* @returns closer for the watcher instance.
|
||||
*/
|
||||
async _handleDir(dir, stats, initialAdd, depth, target, wh, realpath) {
|
||||
const parentDir = this.fsw._getWatchedDir(sysPath.dirname(dir));
|
||||
const tracked = parentDir.has(sysPath.basename(dir));
|
||||
if (!(initialAdd && this.fsw.options.ignoreInitial) && !target && !tracked) {
|
||||
this.fsw._emit(EV.ADD_DIR, dir, stats);
|
||||
}
|
||||
// ensure dir is tracked (harmless if redundant)
|
||||
parentDir.add(sysPath.basename(dir));
|
||||
this.fsw._getWatchedDir(dir);
|
||||
let throttler;
|
||||
let closer;
|
||||
const oDepth = this.fsw.options.depth;
|
||||
if ((oDepth == null || depth <= oDepth) && !this.fsw._symlinkPaths.has(realpath)) {
|
||||
if (!target) {
|
||||
await this._handleRead(dir, initialAdd, wh, target, dir, depth, throttler);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
}
|
||||
closer = this._watchWithNodeFs(dir, (dirPath, stats) => {
|
||||
// if current directory is removed, do nothing
|
||||
if (stats && stats.mtimeMs === 0)
|
||||
return;
|
||||
this._handleRead(dirPath, false, wh, target, dir, depth, throttler);
|
||||
});
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
/**
|
||||
* Handle added file, directory, or glob pattern.
|
||||
* Delegates call to _handleFile / _handleDir after checks.
|
||||
* @param path to file or dir
|
||||
* @param initialAdd was the file added at watch instantiation?
|
||||
* @param priorWh watch helpers inherited from the parent path
|
||||
* @param depth depth relative to user-supplied path
|
||||
* @param target Child path actually targeted for watch
|
||||
*/
|
||||
async _addToNodeFs(path, initialAdd, priorWh, depth, target) {
|
||||
const ready = this.fsw._emitReady;
|
||||
if (this.fsw._isIgnored(path) || this.fsw.closed) {
|
||||
ready();
|
||||
return false;
|
||||
}
|
||||
const wh = this.fsw._getWatchHelpers(path);
|
||||
if (priorWh) {
|
||||
wh.filterPath = (entry) => priorWh.filterPath(entry);
|
||||
wh.filterDir = (entry) => priorWh.filterDir(entry);
|
||||
}
|
||||
// evaluate what is at the path we're being asked to watch
|
||||
try {
|
||||
const stats = await statMethods[wh.statMethod](wh.watchPath);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
if (this.fsw._isIgnored(wh.watchPath, stats)) {
|
||||
ready();
|
||||
return false;
|
||||
}
|
||||
const follow = this.fsw.options.followSymlinks;
|
||||
let closer;
|
||||
if (stats.isDirectory()) {
|
||||
const absPath = sysPath.resolve(path);
|
||||
const targetPath = follow ? await (0, promises_1.realpath)(path) : path;
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
// preserve this symlink's target path
|
||||
if (absPath !== targetPath && targetPath !== undefined) {
|
||||
this.fsw._symlinkPaths.set(absPath, targetPath);
|
||||
}
|
||||
}
|
||||
else if (stats.isSymbolicLink()) {
|
||||
const targetPath = follow ? await (0, promises_1.realpath)(path) : path;
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
const parent = sysPath.dirname(wh.watchPath);
|
||||
this.fsw._getWatchedDir(parent).add(wh.watchPath);
|
||||
this.fsw._emit(EV.ADD, wh.watchPath, stats);
|
||||
closer = await this._handleDir(parent, stats, initialAdd, depth, path, wh, targetPath);
|
||||
if (this.fsw.closed)
|
||||
return;
|
||||
// preserve this symlink's target path
|
||||
if (targetPath !== undefined) {
|
||||
this.fsw._symlinkPaths.set(sysPath.resolve(path), targetPath);
|
||||
}
|
||||
}
|
||||
else {
|
||||
closer = this._handleFile(wh.watchPath, stats, initialAdd);
|
||||
}
|
||||
ready();
|
||||
if (closer)
|
||||
this.fsw._addPathCloser(path, closer);
|
||||
return false;
|
||||
}
|
||||
catch (error) {
|
||||
if (this.fsw._handleError(error)) {
|
||||
ready();
|
||||
return path;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.NodeFsHandler = NodeFsHandler;
|
215
node_modules/chokidar/index.d.ts
generated
vendored
Normal file
|
@@ -0,0 +1,215 @@
|
|||
/*! chokidar - MIT License (c) 2012 Paul Miller (paulmillr.com) */
|
||||
import { Stats } from 'fs';
|
||||
import { EventEmitter } from 'events';
|
||||
import { ReaddirpStream, ReaddirpOptions, EntryInfo } from 'readdirp';
|
||||
import { NodeFsHandler, EventName, Path, EVENTS as EV, WatchHandlers } from './handler.js';
|
||||
type AWF = {
|
||||
stabilityThreshold: number;
|
||||
pollInterval: number;
|
||||
};
|
||||
type BasicOpts = {
|
||||
persistent: boolean;
|
||||
ignoreInitial: boolean;
|
||||
followSymlinks: boolean;
|
||||
cwd?: string;
|
||||
usePolling: boolean;
|
||||
interval: number;
|
||||
binaryInterval: number;
|
||||
alwaysStat?: boolean;
|
||||
depth?: number;
|
||||
ignorePermissionErrors: boolean;
|
||||
atomic: boolean | number;
|
||||
};
|
||||
export type Throttler = {
|
||||
timeoutObject: NodeJS.Timeout;
|
||||
clear: () => void;
|
||||
count: number;
|
||||
};
|
||||
export type ChokidarOptions = Partial<BasicOpts & {
|
||||
ignored: Matcher | Matcher[];
|
||||
awaitWriteFinish: boolean | Partial<AWF>;
|
||||
}>;
|
||||
export type FSWInstanceOptions = BasicOpts & {
|
||||
ignored: Matcher[];
|
||||
awaitWriteFinish: false | AWF;
|
||||
};
|
||||
export type ThrottleType = 'readdir' | 'watch' | 'add' | 'remove' | 'change';
|
||||
export type EmitArgs = [path: Path, stats?: Stats];
|
||||
export type EmitErrorArgs = [error: Error, stats?: Stats];
|
||||
export type EmitArgsWithName = [event: EventName, ...EmitArgs];
|
||||
export type MatchFunction = (val: string, stats?: Stats) => boolean;
|
||||
export interface MatcherObject {
|
||||
path: string;
|
||||
recursive?: boolean;
|
||||
}
|
||||
export type Matcher = string | RegExp | MatchFunction | MatcherObject;
|
||||
/**
|
||||
* Directory entry.
|
||||
*/
|
||||
declare class DirEntry {
|
||||
path: Path;
|
||||
_removeWatcher: (dir: string, base: string) => void;
|
||||
items: Set<Path>;
|
||||
constructor(dir: Path, removeWatcher: (dir: string, base: string) => void);
|
||||
add(item: string): void;
|
||||
remove(item: string): Promise<void>;
|
||||
has(item: string): boolean | undefined;
|
||||
getChildren(): string[];
|
||||
dispose(): void;
|
||||
}
|
||||
export declare class WatchHelper {
|
||||
fsw: FSWatcher;
|
||||
path: string;
|
||||
watchPath: string;
|
||||
fullWatchPath: string;
|
||||
dirParts: string[][];
|
||||
followSymlinks: boolean;
|
||||
statMethod: 'stat' | 'lstat';
|
||||
constructor(path: string, follow: boolean, fsw: FSWatcher);
|
||||
entryPath(entry: EntryInfo): Path;
|
||||
filterPath(entry: EntryInfo): boolean;
|
||||
filterDir(entry: EntryInfo): boolean;
|
||||
}
|
||||
export interface FSWatcherKnownEventMap {
|
||||
[EV.READY]: [];
|
||||
[EV.RAW]: Parameters<WatchHandlers['rawEmitter']>;
|
||||
[EV.ERROR]: Parameters<WatchHandlers['errHandler']>;
|
||||
[EV.ALL]: [event: EventName, ...EmitArgs];
|
||||
}
|
||||
export type FSWatcherEventMap = FSWatcherKnownEventMap & {
|
||||
[k in Exclude<EventName, keyof FSWatcherKnownEventMap>]: EmitArgs;
|
||||
};
|
||||
/**
|
||||
* Watches files & directories for changes. Emitted events:
|
||||
* `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error`
|
||||
*
|
||||
* new FSWatcher()
|
||||
* .add(directories)
|
||||
* .on('add', path => log('File', path, 'was added'))
|
||||
*/
|
||||
export declare class FSWatcher extends EventEmitter<FSWatcherEventMap> {
|
||||
closed: boolean;
|
||||
options: FSWInstanceOptions;
|
||||
_closers: Map<string, Array<any>>;
|
||||
_ignoredPaths: Set<Matcher>;
|
||||
_throttled: Map<ThrottleType, Map<any, any>>;
|
||||
_streams: Set<ReaddirpStream>;
|
||||
_symlinkPaths: Map<Path, string | boolean>;
|
||||
_watched: Map<string, DirEntry>;
|
||||
_pendingWrites: Map<string, any>;
|
||||
_pendingUnlinks: Map<string, EmitArgsWithName>;
|
||||
_readyCount: number;
|
||||
_emitReady: () => void;
|
||||
_closePromise?: Promise<void>;
|
||||
_userIgnored?: MatchFunction;
|
||||
_readyEmitted: boolean;
|
||||
_emitRaw: WatchHandlers['rawEmitter'];
|
||||
_boundRemove: (dir: string, item: string) => void;
|
||||
_nodeFsHandler: NodeFsHandler;
|
||||
constructor(_opts?: ChokidarOptions);
|
||||
_addIgnoredPath(matcher: Matcher): void;
|
||||
_removeIgnoredPath(matcher: Matcher): void;
|
||||
/**
|
||||
* Adds paths to be watched on an existing FSWatcher instance.
|
||||
* @param paths_ file or file list. Other arguments are unused
|
||||
*/
|
||||
add(paths_: Path | Path[], _origAdd?: string, _internal?: boolean): FSWatcher;
|
||||
/**
|
||||
* Close watchers or start ignoring events from specified paths.
|
||||
*/
|
||||
unwatch(paths_: Path | Path[]): FSWatcher;
|
||||
/**
|
||||
* Close watchers and remove all listeners from watched paths.
|
||||
*/
|
||||
close(): Promise<void>;
|
||||
/**
|
||||
* Expose list of watched paths
|
||||
* @returns map of watched directory paths to their contained child names
|
||||
*/
|
||||
getWatched(): Record<string, string[]>;
|
||||
emitWithAll(event: EventName, args: EmitArgs): void;
|
||||
/**
|
||||
* Normalize and emit events.
|
||||
* Calling _emit DOES NOT MEAN emit() would be called!
|
||||
* @param event Type of event
|
||||
* @param path File or directory path
|
||||
* @param stats arguments to be passed with event
|
||||
* @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
_emit(event: EventName, path: Path, stats?: Stats): Promise<this | undefined>;
|
||||
/**
|
||||
* Common handler for errors
|
||||
* @returns The error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
_handleError(error: Error): Error | boolean;
|
||||
/**
|
||||
* Helper utility for throttling
|
||||
* @param actionType type being throttled
|
||||
* @param path being acted upon
|
||||
* @param timeout duration of time to suppress duplicate actions
|
||||
* @returns tracking object or false if action should be suppressed
|
||||
*/
|
||||
_throttle(actionType: ThrottleType, path: Path, timeout: number): Throttler | false;
|
||||
_incrReadyCount(): number;
|
||||
/**
|
||||
* Awaits write operation to finish.
|
||||
* Polls a newly created file for size variations. When the file size does not change for 'threshold' milliseconds, the callback is called.
|
||||
* @param path being acted upon
|
||||
* @param threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished
|
||||
* @param event
|
||||
* @param awfEmit Callback to be called when ready for event to be emitted.
|
||||
*/
|
||||
_awaitWriteFinish(path: Path, threshold: number, event: EventName, awfEmit: (err?: Error, stat?: Stats) => void): void;
|
||||
/**
|
||||
* Determines whether user has asked to ignore this path.
|
||||
*/
|
||||
_isIgnored(path: Path, stats?: Stats): boolean;
|
||||
_isntIgnored(path: Path, stat?: Stats): boolean;
|
||||
/**
|
||||
* Provides a set of common helpers and properties relating to symlink handling.
|
||||
* @param path file or directory pattern being watched
|
||||
*/
|
||||
_getWatchHelpers(path: Path): WatchHelper;
|
||||
/**
|
||||
* Provides directory tracking objects
|
||||
* @param directory path of the directory
|
||||
*/
|
||||
_getWatchedDir(directory: string): DirEntry;
|
||||
/**
|
||||
* Check for read permissions: https://stackoverflow.com/a/11781404/1358405
|
||||
*/
|
||||
_hasReadPermissions(stats: Stats): boolean;
|
||||
/**
|
||||
* Handles emitting unlink events for
|
||||
* files and directories, and via recursion, for
|
||||
* files and directories within directories that are unlinked
|
||||
* @param directory within which the following item is located
|
||||
* @param item base path of item/directory
|
||||
*/
|
||||
_remove(directory: string, item: string, isDirectory?: boolean): void;
|
||||
/**
|
||||
* Closes all watchers for a path
|
||||
*/
|
||||
_closePath(path: Path): void;
|
||||
/**
|
||||
* Closes only file-specific watchers
|
||||
*/
|
||||
_closeFile(path: Path): void;
|
||||
_addPathCloser(path: Path, closer: () => void): void;
|
||||
_readdirp(root: Path, opts?: Partial<ReaddirpOptions>): ReaddirpStream | undefined;
|
||||
}
|
||||
/**
|
||||
* Instantiates watcher with paths to be tracked.
|
||||
* @param paths file / directory paths
|
||||
* @param options opts, such as `atomic`, `awaitWriteFinish`, `ignored`, and others
|
||||
* @returns an instance of FSWatcher for chaining.
|
||||
* @example
|
||||
* const watcher = watch('.').on('all', (event, path) => { console.log(event, path); });
|
||||
* watch('.', { atomic: true, awaitWriteFinish: true, ignored: (f, stats) => stats?.isFile() && !f.endsWith('.js') })
|
||||
*/
|
||||
export declare function watch(paths: string | string[], options?: ChokidarOptions): FSWatcher;
|
||||
declare const _default: {
|
||||
watch: typeof watch;
|
||||
FSWatcher: typeof FSWatcher;
|
||||
};
|
||||
export default _default;
|
804
node_modules/chokidar/index.js
generated
vendored
Normal file
|
@@ -0,0 +1,804 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.FSWatcher = exports.WatchHelper = void 0;
|
||||
exports.watch = watch;
|
||||
/*! chokidar - MIT License (c) 2012 Paul Miller (paulmillr.com) */
|
||||
const fs_1 = require("fs");
|
||||
const promises_1 = require("fs/promises");
|
||||
const events_1 = require("events");
|
||||
const sysPath = require("path");
|
||||
const readdirp_1 = require("readdirp");
|
||||
const handler_js_1 = require("./handler.js");
|
||||
const SLASH = '/';
|
||||
const SLASH_SLASH = '//';
|
||||
const ONE_DOT = '.';
|
||||
const TWO_DOTS = '..';
|
||||
const STRING_TYPE = 'string';
|
||||
const BACK_SLASH_RE = /\\/g;
|
||||
const DOUBLE_SLASH_RE = /\/\//;
|
||||
const DOT_RE = /\..*\.(sw[px])$|~$|\.subl.*\.tmp/;
|
||||
const REPLACER_RE = /^\.[/\\]/;
|
||||
function arrify(item) {
|
||||
return Array.isArray(item) ? item : [item];
|
||||
}
|
||||
const isMatcherObject = (matcher) => typeof matcher === 'object' && matcher !== null && !(matcher instanceof RegExp);
|
||||
function createPattern(matcher) {
|
||||
if (typeof matcher === 'function')
|
||||
return matcher;
|
||||
if (typeof matcher === 'string')
|
||||
return (string) => matcher === string;
|
||||
if (matcher instanceof RegExp)
|
||||
return (string) => matcher.test(string);
|
||||
if (typeof matcher === 'object' && matcher !== null) {
|
||||
return (string) => {
|
||||
if (matcher.path === string)
|
||||
return true;
|
||||
if (matcher.recursive) {
|
||||
const relative = sysPath.relative(matcher.path, string);
|
||||
if (!relative) {
|
||||
return false;
|
||||
}
|
||||
return !relative.startsWith('..') && !sysPath.isAbsolute(relative);
|
||||
}
|
||||
return false;
|
||||
};
|
||||
}
|
||||
return () => false;
|
||||
}
|
||||
function normalizePath(path) {
|
||||
if (typeof path !== 'string')
|
||||
throw new Error('string expected');
|
||||
path = sysPath.normalize(path);
|
||||
path = path.replace(/\\/g, '/');
|
||||
let prepend = false;
|
||||
if (path.startsWith('//'))
|
||||
prepend = true;
|
||||
const DOUBLE_SLASH_RE = /\/\//;
|
||||
while (path.match(DOUBLE_SLASH_RE))
|
||||
path = path.replace(DOUBLE_SLASH_RE, '/');
|
||||
if (prepend)
|
||||
path = '/' + path;
|
||||
return path;
|
||||
}
|
||||
function matchPatterns(patterns, testString, stats) {
|
||||
const path = normalizePath(testString);
|
||||
for (let index = 0; index < patterns.length; index++) {
|
||||
const pattern = patterns[index];
|
||||
if (pattern(path, stats)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function anymatch(matchers, testString) {
|
||||
if (matchers == null) {
|
||||
throw new TypeError('anymatch: specify first argument');
|
||||
}
|
||||
// Early cache for matchers.
|
||||
const matchersArray = arrify(matchers);
|
||||
const patterns = matchersArray.map((matcher) => createPattern(matcher));
|
||||
if (testString == null) {
|
||||
return (testString, stats) => {
|
||||
return matchPatterns(patterns, testString, stats);
|
||||
};
|
||||
}
|
||||
return matchPatterns(patterns, testString);
|
||||
}
|
||||
const unifyPaths = (paths_) => {
|
||||
const paths = arrify(paths_).flat();
|
||||
if (!paths.every((p) => typeof p === STRING_TYPE)) {
|
||||
throw new TypeError(`Non-string provided as watch path: ${paths}`);
|
||||
}
|
||||
return paths.map(normalizePathToUnix);
|
||||
};
|
||||
// If SLASH_SLASH occurs at the beginning of path, it is not replaced
|
||||
// because "//StoragePC/DrivePool/Movies" is a valid network path
|
||||
const toUnix = (string) => {
|
||||
let str = string.replace(BACK_SLASH_RE, SLASH);
|
||||
let prepend = false;
|
||||
if (str.startsWith(SLASH_SLASH)) {
|
||||
prepend = true;
|
||||
}
|
||||
while (str.match(DOUBLE_SLASH_RE)) {
|
||||
str = str.replace(DOUBLE_SLASH_RE, SLASH);
|
||||
}
|
||||
if (prepend) {
|
||||
str = SLASH + str;
|
||||
}
|
||||
return str;
|
||||
};
|
||||
// Our version of upath.normalize
|
||||
// TODO: this is not equal to path-normalize module - investigate why
|
||||
const normalizePathToUnix = (path) => toUnix(sysPath.normalize(toUnix(path)));
|
||||
// TODO: refactor
|
||||
const normalizeIgnored = (cwd = '') => (path) => {
|
||||
if (typeof path === 'string') {
|
||||
return normalizePathToUnix(sysPath.isAbsolute(path) ? path : sysPath.join(cwd, path));
|
||||
}
|
||||
else {
|
||||
return path;
|
||||
}
|
||||
};
|
||||
const getAbsolutePath = (path, cwd) => {
|
||||
if (sysPath.isAbsolute(path)) {
|
||||
return path;
|
||||
}
|
||||
return sysPath.join(cwd, path);
|
||||
};
|
||||
const EMPTY_SET = Object.freeze(new Set());
|
||||
/**
|
||||
* Directory entry.
|
||||
*/
|
||||
class DirEntry {
|
||||
constructor(dir, removeWatcher) {
|
||||
this.path = dir;
|
||||
this._removeWatcher = removeWatcher;
|
||||
this.items = new Set();
|
||||
}
|
||||
add(item) {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return;
|
||||
if (item !== ONE_DOT && item !== TWO_DOTS)
|
||||
items.add(item);
|
||||
}
|
||||
async remove(item) {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return;
|
||||
items.delete(item);
|
||||
if (items.size > 0)
|
||||
return;
|
||||
const dir = this.path;
|
||||
try {
|
||||
await (0, promises_1.readdir)(dir);
|
||||
}
|
||||
catch (err) {
|
||||
if (this._removeWatcher) {
|
||||
this._removeWatcher(sysPath.dirname(dir), sysPath.basename(dir));
|
||||
}
|
||||
}
|
||||
}
|
||||
has(item) {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return;
|
||||
return items.has(item);
|
||||
}
|
||||
getChildren() {
|
||||
const { items } = this;
|
||||
if (!items)
|
||||
return [];
|
||||
return [...items.values()];
|
||||
}
|
||||
dispose() {
|
||||
this.items.clear();
|
||||
this.path = '';
|
||||
this._removeWatcher = handler_js_1.EMPTY_FN;
|
||||
this.items = EMPTY_SET;
|
||||
Object.freeze(this);
|
||||
}
|
||||
}
|
||||
const STAT_METHOD_F = 'stat';
|
||||
const STAT_METHOD_L = 'lstat';
|
||||
class WatchHelper {
|
||||
constructor(path, follow, fsw) {
|
||||
this.fsw = fsw;
|
||||
const watchPath = path;
|
||||
this.path = path = path.replace(REPLACER_RE, '');
|
||||
this.watchPath = watchPath;
|
||||
this.fullWatchPath = sysPath.resolve(watchPath);
|
||||
this.dirParts = [];
|
||||
this.dirParts.forEach((parts) => {
|
||||
if (parts.length > 1)
|
||||
parts.pop();
|
||||
});
|
||||
this.followSymlinks = follow;
|
||||
this.statMethod = follow ? STAT_METHOD_F : STAT_METHOD_L;
|
||||
}
|
||||
entryPath(entry) {
|
||||
return sysPath.join(this.watchPath, sysPath.relative(this.watchPath, entry.fullPath));
|
||||
}
|
||||
filterPath(entry) {
|
||||
const { stats } = entry;
|
||||
if (stats && stats.isSymbolicLink())
|
||||
return this.filterDir(entry);
|
||||
const resolvedPath = this.entryPath(entry);
|
||||
// TODO: what if stats is undefined? remove !
|
||||
return this.fsw._isntIgnored(resolvedPath, stats) && this.fsw._hasReadPermissions(stats);
|
||||
}
|
||||
filterDir(entry) {
|
||||
return this.fsw._isntIgnored(this.entryPath(entry), entry.stats);
|
||||
}
|
||||
}
|
||||
exports.WatchHelper = WatchHelper;
|
||||
/**
|
||||
* Watches files & directories for changes. Emitted events:
|
||||
* `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error`
|
||||
*
|
||||
* new FSWatcher()
|
||||
* .add(directories)
|
||||
* .on('add', path => log('File', path, 'was added'))
|
||||
*/
|
||||
class FSWatcher extends events_1.EventEmitter {
|
||||
// Not indenting methods for history sake; for now.
|
||||
constructor(_opts = {}) {
|
||||
super();
|
||||
this.closed = false;
|
||||
this._closers = new Map();
|
||||
this._ignoredPaths = new Set();
|
||||
this._throttled = new Map();
|
||||
this._streams = new Set();
|
||||
this._symlinkPaths = new Map();
|
||||
this._watched = new Map();
|
||||
this._pendingWrites = new Map();
|
||||
this._pendingUnlinks = new Map();
|
||||
this._readyCount = 0;
|
||||
this._readyEmitted = false;
|
||||
const awf = _opts.awaitWriteFinish;
|
||||
const DEF_AWF = { stabilityThreshold: 2000, pollInterval: 100 };
|
||||
const opts = {
|
||||
// Defaults
|
||||
persistent: true,
|
||||
ignoreInitial: false,
|
||||
ignorePermissionErrors: false,
|
||||
interval: 100,
|
||||
binaryInterval: 300,
|
||||
followSymlinks: true,
|
||||
usePolling: false,
|
||||
// useAsync: false,
|
||||
atomic: true, // NOTE: overwritten later (depends on usePolling)
|
||||
..._opts,
|
||||
// Change format
|
||||
ignored: _opts.ignored ? arrify(_opts.ignored) : arrify([]),
|
||||
awaitWriteFinish: awf === true ? DEF_AWF : typeof awf === 'object' ? { ...DEF_AWF, ...awf } : false,
|
||||
};
|
||||
// Always default to polling on IBM i because fs.watch() is not available on IBM i.
|
||||
if (handler_js_1.isIBMi)
|
||||
opts.usePolling = true;
|
||||
// Editor atomic write normalization enabled by default with fs.watch
|
||||
if (opts.atomic === undefined)
|
||||
opts.atomic = !opts.usePolling;
|
||||
// opts.atomic = typeof _opts.atomic === 'number' ? _opts.atomic : 100;
|
||||
// Global override. Useful for developers, who need to force polling for all
|
||||
// instances of chokidar, regardless of usage / dependency depth
|
||||
const envPoll = process.env.CHOKIDAR_USEPOLLING;
|
||||
if (envPoll !== undefined) {
|
||||
const envLower = envPoll.toLowerCase();
|
||||
if (envLower === 'false' || envLower === '0')
|
||||
opts.usePolling = false;
|
||||
else if (envLower === 'true' || envLower === '1')
|
||||
opts.usePolling = true;
|
||||
else
|
||||
opts.usePolling = !!envLower;
|
||||
}
|
||||
const envInterval = process.env.CHOKIDAR_INTERVAL;
|
||||
if (envInterval)
|
||||
opts.interval = Number.parseInt(envInterval, 10);
|
||||
// This is done to emit ready only once, but each 'add' will increase that?
|
||||
let readyCalls = 0;
|
||||
this._emitReady = () => {
|
||||
readyCalls++;
|
||||
if (readyCalls >= this._readyCount) {
|
||||
this._emitReady = handler_js_1.EMPTY_FN;
|
||||
this._readyEmitted = true;
|
||||
// use process.nextTick to allow time for listener to be bound
|
||||
process.nextTick(() => this.emit(handler_js_1.EVENTS.READY));
|
||||
}
|
||||
};
|
||||
this._emitRaw = (...args) => this.emit(handler_js_1.EVENTS.RAW, ...args);
|
||||
this._boundRemove = this._remove.bind(this);
|
||||
this.options = opts;
|
||||
this._nodeFsHandler = new handler_js_1.NodeFsHandler(this);
|
||||
// You’re frozen when your heart’s not open.
|
||||
Object.freeze(opts);
|
||||
}
|
||||
_addIgnoredPath(matcher) {
|
||||
if (isMatcherObject(matcher)) {
|
||||
// return early if we already have a deeply equal matcher object
|
||||
for (const ignored of this._ignoredPaths) {
|
||||
if (isMatcherObject(ignored) &&
|
||||
ignored.path === matcher.path &&
|
||||
ignored.recursive === matcher.recursive) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
this._ignoredPaths.add(matcher);
|
||||
}
|
||||
_removeIgnoredPath(matcher) {
|
||||
this._ignoredPaths.delete(matcher);
|
||||
// now find any matcher objects with the matcher as path
|
||||
if (typeof matcher === 'string') {
|
||||
for (const ignored of this._ignoredPaths) {
|
||||
// TODO (43081j): make this more efficient.
|
||||
// probably just make a `this._ignoredDirectories` or some
|
||||
// such thing.
|
||||
if (isMatcherObject(ignored) && ignored.path === matcher) {
|
||||
this._ignoredPaths.delete(ignored);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Public methods
|
||||
/**
|
||||
* Adds paths to be watched on an existing FSWatcher instance.
|
||||
* @param paths_ file or file list. Other arguments are unused
|
||||
*/
|
||||
add(paths_, _origAdd, _internal) {
|
||||
const { cwd } = this.options;
|
||||
this.closed = false;
|
||||
this._closePromise = undefined;
|
||||
let paths = unifyPaths(paths_);
|
||||
if (cwd) {
|
||||
paths = paths.map((path) => {
|
||||
const absPath = getAbsolutePath(path, cwd);
|
||||
// Check `path` instead of `absPath` because the cwd portion can't be a glob
|
||||
return absPath;
|
||||
});
|
||||
}
|
||||
paths.forEach((path) => {
|
||||
this._removeIgnoredPath(path);
|
||||
});
|
||||
this._userIgnored = undefined;
|
||||
if (!this._readyCount)
|
||||
this._readyCount = 0;
|
||||
this._readyCount += paths.length;
|
||||
Promise.all(paths.map(async (path) => {
|
||||
const res = await this._nodeFsHandler._addToNodeFs(path, !_internal, undefined, 0, _origAdd);
|
||||
if (res)
|
||||
this._emitReady();
|
||||
return res;
|
||||
})).then((results) => {
|
||||
if (this.closed)
|
||||
return;
|
||||
results.forEach((item) => {
|
||||
if (item)
|
||||
this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item));
|
||||
});
|
||||
});
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Close watchers or start ignoring events from specified paths.
|
||||
*/
|
||||
unwatch(paths_) {
|
||||
if (this.closed)
|
||||
return this;
|
||||
const paths = unifyPaths(paths_);
|
||||
const { cwd } = this.options;
|
||||
paths.forEach((path) => {
|
||||
// convert to absolute path unless relative path already matches
|
||||
if (!sysPath.isAbsolute(path) && !this._closers.has(path)) {
|
||||
if (cwd)
|
||||
path = sysPath.join(cwd, path);
|
||||
path = sysPath.resolve(path);
|
||||
}
|
||||
this._closePath(path);
|
||||
this._addIgnoredPath(path);
|
||||
if (this._watched.has(path)) {
|
||||
this._addIgnoredPath({
|
||||
path,
|
||||
recursive: true,
|
||||
});
|
||||
}
|
||||
// reset the cached userIgnored anymatch fn
|
||||
// to make ignoredPaths changes effective
|
||||
this._userIgnored = undefined;
|
||||
});
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Close watchers and remove all listeners from watched paths.
|
||||
*/
|
||||
close() {
|
||||
if (this._closePromise) {
|
||||
return this._closePromise;
|
||||
}
|
||||
this.closed = true;
|
||||
// Memory management.
|
||||
this.removeAllListeners();
|
||||
const closers = [];
|
||||
this._closers.forEach((closerList) => closerList.forEach((closer) => {
|
||||
const promise = closer();
|
||||
if (promise instanceof Promise)
|
||||
closers.push(promise);
|
||||
}));
|
||||
this._streams.forEach((stream) => stream.destroy());
|
||||
this._userIgnored = undefined;
|
||||
this._readyCount = 0;
|
||||
this._readyEmitted = false;
|
||||
this._watched.forEach((dirent) => dirent.dispose());
|
||||
this._closers.clear();
|
||||
this._watched.clear();
|
||||
this._streams.clear();
|
||||
this._symlinkPaths.clear();
|
||||
this._throttled.clear();
|
||||
this._closePromise = closers.length
|
||||
? Promise.all(closers).then(() => undefined)
|
||||
: Promise.resolve();
|
||||
return this._closePromise;
|
||||
}
|
||||
/**
|
||||
* Expose list of watched paths
|
||||
* @returns map of watched directory paths to their contained child names
|
||||
*/
|
||||
getWatched() {
|
||||
const watchList = {};
|
||||
this._watched.forEach((entry, dir) => {
|
||||
const key = this.options.cwd ? sysPath.relative(this.options.cwd, dir) : dir;
|
||||
const index = key || ONE_DOT;
|
||||
watchList[index] = entry.getChildren().sort();
|
||||
});
|
||||
return watchList;
|
||||
}
|
||||
emitWithAll(event, args) {
|
||||
this.emit(event, ...args);
|
||||
if (event !== handler_js_1.EVENTS.ERROR)
|
||||
this.emit(handler_js_1.EVENTS.ALL, event, ...args);
|
||||
}
|
||||
// Common helpers
|
||||
// --------------
|
||||
/**
|
||||
* Normalize and emit events.
|
||||
* Calling _emit DOES NOT MEAN emit() would be called!
|
||||
* @param event Type of event
|
||||
* @param path File or directory path
|
||||
* @param stats arguments to be passed with event
|
||||
* @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
async _emit(event, path, stats) {
|
||||
if (this.closed)
|
||||
return;
|
||||
const opts = this.options;
|
||||
if (handler_js_1.isWindows)
|
||||
path = sysPath.normalize(path);
|
||||
if (opts.cwd)
|
||||
path = sysPath.relative(opts.cwd, path);
|
||||
const args = [path];
|
||||
if (stats != null)
|
||||
args.push(stats);
|
||||
const awf = opts.awaitWriteFinish;
|
||||
let pw;
|
||||
if (awf && (pw = this._pendingWrites.get(path))) {
|
||||
pw.lastChange = new Date();
|
||||
return this;
|
||||
}
|
||||
if (opts.atomic) {
|
||||
if (event === handler_js_1.EVENTS.UNLINK) {
|
||||
this._pendingUnlinks.set(path, [event, ...args]);
|
||||
setTimeout(() => {
|
||||
this._pendingUnlinks.forEach((entry, path) => {
|
||||
this.emit(...entry);
|
||||
this.emit(handler_js_1.EVENTS.ALL, ...entry);
|
||||
this._pendingUnlinks.delete(path);
|
||||
});
|
||||
}, typeof opts.atomic === 'number' ? opts.atomic : 100);
|
||||
return this;
|
||||
}
|
||||
if (event === handler_js_1.EVENTS.ADD && this._pendingUnlinks.has(path)) {
|
||||
event = handler_js_1.EVENTS.CHANGE;
|
||||
this._pendingUnlinks.delete(path);
|
||||
}
|
||||
}
|
||||
if (awf && (event === handler_js_1.EVENTS.ADD || event === handler_js_1.EVENTS.CHANGE) && this._readyEmitted) {
|
||||
const awfEmit = (err, stats) => {
|
||||
if (err) {
|
||||
event = handler_js_1.EVENTS.ERROR;
|
||||
args[0] = err;
|
||||
this.emitWithAll(event, args);
|
||||
}
|
||||
else if (stats) {
|
||||
// if stats doesn't exist the file must have been deleted
|
||||
if (args.length > 1) {
|
||||
args[1] = stats;
|
||||
}
|
||||
else {
|
||||
args.push(stats);
|
||||
}
|
||||
this.emitWithAll(event, args);
|
||||
}
|
||||
};
|
||||
this._awaitWriteFinish(path, awf.stabilityThreshold, event, awfEmit);
|
||||
return this;
|
||||
}
|
||||
if (event === handler_js_1.EVENTS.CHANGE) {
|
||||
const isThrottled = !this._throttle(handler_js_1.EVENTS.CHANGE, path, 50);
|
||||
if (isThrottled)
|
||||
return this;
|
||||
}
|
||||
if (opts.alwaysStat &&
|
||||
stats === undefined &&
|
||||
(event === handler_js_1.EVENTS.ADD || event === handler_js_1.EVENTS.ADD_DIR || event === handler_js_1.EVENTS.CHANGE)) {
|
||||
const fullPath = opts.cwd ? sysPath.join(opts.cwd, path) : path;
|
||||
let stats;
|
||||
try {
|
||||
stats = await (0, promises_1.stat)(fullPath);
|
||||
}
|
||||
catch (err) {
|
||||
// do nothing
|
||||
}
|
||||
// Suppress event when fs_stat fails, to avoid sending undefined 'stat'
|
||||
if (!stats || this.closed)
|
||||
return;
|
||||
args.push(stats);
|
||||
}
|
||||
this.emitWithAll(event, args);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* Common handler for errors
|
||||
* @returns The error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
_handleError(error) {
|
||||
const code = error && error.code;
|
||||
if (error &&
|
||||
code !== 'ENOENT' &&
|
||||
code !== 'ENOTDIR' &&
|
||||
(!this.options.ignorePermissionErrors || (code !== 'EPERM' && code !== 'EACCES'))) {
|
||||
this.emit(handler_js_1.EVENTS.ERROR, error);
|
||||
}
|
||||
return error || this.closed;
|
||||
}
|
||||
/**
|
||||
* Helper utility for throttling
|
||||
* @param actionType type being throttled
|
||||
* @param path being acted upon
|
||||
* @param timeout duration of time to suppress duplicate actions
|
||||
* @returns tracking object or false if action should be suppressed
|
||||
*/
|
||||
_throttle(actionType, path, timeout) {
|
||||
if (!this._throttled.has(actionType)) {
|
||||
this._throttled.set(actionType, new Map());
|
||||
}
|
||||
const action = this._throttled.get(actionType);
|
||||
if (!action)
|
||||
throw new Error('invalid throttle');
|
||||
const actionPath = action.get(path);
|
||||
if (actionPath) {
|
||||
actionPath.count++;
|
||||
return false;
|
||||
}
|
||||
// eslint-disable-next-line prefer-const
|
||||
let timeoutObject;
|
||||
const clear = () => {
|
||||
const item = action.get(path);
|
||||
const count = item ? item.count : 0;
|
||||
action.delete(path);
|
||||
clearTimeout(timeoutObject);
|
||||
if (item)
|
||||
clearTimeout(item.timeoutObject);
|
||||
return count;
|
||||
};
|
||||
timeoutObject = setTimeout(clear, timeout);
|
||||
const thr = { timeoutObject, clear, count: 0 };
|
||||
action.set(path, thr);
|
||||
return thr;
|
||||
}
|
||||
_incrReadyCount() {
|
||||
return this._readyCount++;
|
||||
}
|
||||
/**
|
||||
* Awaits write operation to finish.
|
||||
* Polls a newly created file for size variations. When the file size does not change for 'threshold' milliseconds, the callback is called.
|
||||
* @param path being acted upon
|
||||
* @param threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished
|
||||
* @param event
|
||||
* @param awfEmit Callback to be called when ready for event to be emitted.
|
||||
*/
|
||||
_awaitWriteFinish(path, threshold, event, awfEmit) {
|
||||
const awf = this.options.awaitWriteFinish;
|
||||
if (typeof awf !== 'object')
|
||||
return;
|
||||
const pollInterval = awf.pollInterval;
|
||||
let timeoutHandler;
|
||||
let fullPath = path;
|
||||
if (this.options.cwd && !sysPath.isAbsolute(path)) {
|
||||
fullPath = sysPath.join(this.options.cwd, path);
|
||||
}
|
||||
const now = new Date();
|
||||
const writes = this._pendingWrites;
|
||||
function awaitWriteFinishFn(prevStat) {
|
||||
(0, fs_1.stat)(fullPath, (err, curStat) => {
|
||||
if (err || !writes.has(path)) {
|
||||
if (err && err.code !== 'ENOENT')
|
||||
awfEmit(err);
|
||||
return;
|
||||
}
|
||||
const now = Number(new Date());
|
||||
if (prevStat && curStat.size !== prevStat.size) {
|
||||
writes.get(path).lastChange = now;
|
||||
}
|
||||
const pw = writes.get(path);
|
||||
const df = now - pw.lastChange;
|
||||
if (df >= threshold) {
|
||||
writes.delete(path);
|
||||
awfEmit(undefined, curStat);
|
||||
}
|
||||
else {
|
||||
timeoutHandler = setTimeout(awaitWriteFinishFn, pollInterval, curStat);
|
||||
}
|
||||
});
|
||||
}
|
||||
if (!writes.has(path)) {
|
||||
writes.set(path, {
|
||||
lastChange: now,
|
||||
cancelWait: () => {
|
||||
writes.delete(path);
|
||||
clearTimeout(timeoutHandler);
|
||||
return event;
|
||||
},
|
||||
});
|
||||
timeoutHandler = setTimeout(awaitWriteFinishFn, pollInterval);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Determines whether user has asked to ignore this path.
|
||||
*/
|
||||
_isIgnored(path, stats) {
|
||||
if (this.options.atomic && DOT_RE.test(path))
|
||||
return true;
|
||||
if (!this._userIgnored) {
|
||||
const { cwd } = this.options;
|
||||
const ign = this.options.ignored;
|
||||
const ignored = (ign || []).map(normalizeIgnored(cwd));
|
||||
const ignoredPaths = [...this._ignoredPaths];
|
||||
const list = [...ignoredPaths.map(normalizeIgnored(cwd)), ...ignored];
|
||||
this._userIgnored = anymatch(list, undefined);
|
||||
}
|
||||
return this._userIgnored(path, stats);
|
||||
}
|
||||
_isntIgnored(path, stat) {
|
||||
return !this._isIgnored(path, stat);
|
||||
}
|
||||
/**
|
||||
* Provides a set of common helpers and properties relating to symlink handling.
|
||||
* @param path file or directory pattern being watched
|
||||
*/
|
||||
_getWatchHelpers(path) {
|
||||
return new WatchHelper(path, this.options.followSymlinks, this);
|
||||
}
|
||||
// Directory helpers
|
||||
// -----------------
|
||||
/**
|
||||
* Provides directory tracking objects
|
||||
* @param directory path of the directory
|
||||
*/
|
||||
_getWatchedDir(directory) {
|
||||
const dir = sysPath.resolve(directory);
|
||||
if (!this._watched.has(dir))
|
||||
this._watched.set(dir, new DirEntry(dir, this._boundRemove));
|
||||
return this._watched.get(dir);
|
||||
}
|
||||
// File helpers
|
||||
// ------------
|
||||
/**
|
||||
* Check for read permissions: https://stackoverflow.com/a/11781404/1358405
|
||||
*/
|
||||
_hasReadPermissions(stats) {
|
||||
if (this.options.ignorePermissionErrors)
|
||||
return true;
|
||||
return Boolean(Number(stats.mode) & 0o400);
|
||||
}
|
||||
/**
|
||||
* Handles emitting unlink events for
|
||||
* files and directories, and via recursion, for
|
||||
* files and directories within directories that are unlinked
|
||||
* @param directory within which the following item is located
|
||||
* @param item base path of item/directory
|
||||
*/
|
||||
_remove(directory, item, isDirectory) {
|
||||
// if what is being deleted is a directory, get that directory's paths
|
||||
// for recursive deleting and cleaning of watched object
|
||||
// if it is not a directory, nestedDirectoryChildren will be empty array
|
||||
const path = sysPath.join(directory, item);
|
||||
const fullPath = sysPath.resolve(path);
|
||||
isDirectory =
|
||||
isDirectory != null ? isDirectory : this._watched.has(path) || this._watched.has(fullPath);
|
||||
// prevent duplicate handling in case of arriving here nearly simultaneously
|
||||
// via multiple paths (such as _handleFile and _handleDir)
|
||||
if (!this._throttle('remove', path, 100))
|
||||
return;
|
||||
// if the only watched file is removed, watch for its return
|
||||
if (!isDirectory && this._watched.size === 1) {
|
||||
this.add(directory, item, true);
|
||||
}
|
||||
// This will create a new entry in the watched object in either case
|
||||
// so we got to do the directory check beforehand
|
||||
const wp = this._getWatchedDir(path);
|
||||
const nestedDirectoryChildren = wp.getChildren();
|
||||
// Recursively remove children directories / files.
|
||||
nestedDirectoryChildren.forEach((nested) => this._remove(path, nested));
|
||||
// Check if item was on the watched list and remove it
|
||||
const parent = this._getWatchedDir(directory);
|
||||
const wasTracked = parent.has(item);
|
||||
parent.remove(item);
|
||||
// Fixes issue #1042 -> Relative paths were detected and added as symlinks
|
||||
// (https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L612),
|
||||
// but never removed from the map in case the path was deleted.
|
||||
// This leads to an incorrect state if the path was recreated:
|
||||
// https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L553
|
||||
if (this._symlinkPaths.has(fullPath)) {
|
||||
this._symlinkPaths.delete(fullPath);
|
||||
}
|
||||
// If we wait for this file to be fully written, cancel the wait.
|
||||
let relPath = path;
|
||||
if (this.options.cwd)
|
||||
relPath = sysPath.relative(this.options.cwd, path);
|
||||
if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) {
|
||||
const event = this._pendingWrites.get(relPath).cancelWait();
|
||||
if (event === handler_js_1.EVENTS.ADD)
|
||||
return;
|
||||
}
|
||||
// The Entry will either be a directory that just got removed
|
||||
// or a bogus entry to a file, in either case we have to remove it
|
||||
this._watched.delete(path);
|
||||
this._watched.delete(fullPath);
|
||||
const eventName = isDirectory ? handler_js_1.EVENTS.UNLINK_DIR : handler_js_1.EVENTS.UNLINK;
|
||||
if (wasTracked && !this._isIgnored(path))
|
||||
this._emit(eventName, path);
|
||||
// Avoid conflicts if we later create another file with the same name
|
||||
this._closePath(path);
|
||||
}
|
||||
/**
|
||||
* Closes all watchers for a path
|
||||
*/
|
||||
_closePath(path) {
|
||||
this._closeFile(path);
|
||||
const dir = sysPath.dirname(path);
|
||||
this._getWatchedDir(dir).remove(sysPath.basename(path));
|
||||
}
|
||||
/**
|
||||
* Closes only file-specific watchers
|
||||
*/
|
||||
_closeFile(path) {
|
||||
const closers = this._closers.get(path);
|
||||
if (!closers)
|
||||
return;
|
||||
closers.forEach((closer) => closer());
|
||||
this._closers.delete(path);
|
||||
}
|
||||
_addPathCloser(path, closer) {
|
||||
if (!closer)
|
||||
return;
|
||||
let list = this._closers.get(path);
|
||||
if (!list) {
|
||||
list = [];
|
||||
this._closers.set(path, list);
|
||||
}
|
||||
list.push(closer);
|
||||
}
|
||||
_readdirp(root, opts) {
|
||||
if (this.closed)
|
||||
return;
|
||||
const options = { type: handler_js_1.EVENTS.ALL, alwaysStat: true, lstat: true, ...opts, depth: 0 };
|
||||
let stream = (0, readdirp_1.readdirp)(root, options);
|
||||
this._streams.add(stream);
|
||||
stream.once(handler_js_1.STR_CLOSE, () => {
|
||||
stream = undefined;
|
||||
});
|
||||
stream.once(handler_js_1.STR_END, () => {
|
||||
if (stream) {
|
||||
this._streams.delete(stream);
|
||||
stream = undefined;
|
||||
}
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
}
|
||||
exports.FSWatcher = FSWatcher;
|
||||
/**
|
||||
* Instantiates watcher with paths to be tracked.
|
||||
* @param paths file / directory paths
|
||||
* @param options opts, such as `atomic`, `awaitWriteFinish`, `ignored`, and others
|
||||
* @returns an instance of FSWatcher for chaining.
|
||||
* @example
|
||||
* const watcher = watch('.').on('all', (event, path) => { console.log(event, path); });
|
||||
* watch('.', { atomic: true, awaitWriteFinish: true, ignored: (f, stats) => stats?.isFile() && !f.endsWith('.js') })
|
||||
*/
|
||||
function watch(paths, options = {}) {
|
||||
const watcher = new FSWatcher(options);
|
||||
watcher.add(paths);
|
||||
return watcher;
|
||||
}
|
||||
exports.default = { watch, FSWatcher };
|
69
node_modules/chokidar/package.json
generated
vendored
Normal file
|
@@ -0,0 +1,69 @@
|
|||
{
|
||||
"name": "chokidar",
|
||||
"description": "Minimal and efficient cross-platform file watching library",
|
||||
"version": "4.0.3",
|
||||
"homepage": "https://github.com/paulmillr/chokidar",
|
||||
"author": "Paul Miller (https://paulmillr.com)",
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts",
|
||||
"handler.js",
|
||||
"handler.d.ts",
|
||||
"esm"
|
||||
],
|
||||
"main": "./index.js",
|
||||
"module": "./esm/index.js",
|
||||
"types": "./index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./esm/index.js",
|
||||
"require": "./index.js"
|
||||
},
|
||||
"./handler.js": {
|
||||
"import": "./esm/handler.js",
|
||||
"require": "./handler.js"
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"readdirp": "^4.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@paulmillr/jsbt": "0.2.1",
|
||||
"@types/node": "20.14.8",
|
||||
"chai": "4.3.4",
|
||||
"prettier": "3.1.1",
|
||||
"rimraf": "5.0.5",
|
||||
"sinon": "12.0.1",
|
||||
"sinon-chai": "3.7.0",
|
||||
"typescript": "5.5.2",
|
||||
"upath": "2.0.1"
|
||||
},
|
||||
"sideEffects": false,
|
||||
"engines": {
|
||||
"node": ">= 14.16.0"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/paulmillr/chokidar.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/paulmillr/chokidar/issues"
|
||||
},
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"build": "tsc && tsc -p tsconfig.esm.json",
|
||||
"lint": "prettier --check src",
|
||||
"format": "prettier --write src",
|
||||
"test": "node --test"
|
||||
},
|
||||
"keywords": [
|
||||
"fs",
|
||||
"watch",
|
||||
"watchFile",
|
||||
"watcher",
|
||||
"watching",
|
||||
"file",
|
||||
"fsevents"
|
||||
],
|
||||
"funding": "https://paulmillr.com/funding/"
|
||||
}
|
21
node_modules/readdirp/LICENSE
generated
vendored
Normal file
|
@@ -0,0 +1,21 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
120
node_modules/readdirp/README.md
generated
vendored
Normal file
|
@@ -0,0 +1,120 @@
|
|||
# readdirp
|
||||
|
||||
Recursive version of fs.readdir. Exposes a **stream API** (with small RAM & CPU footprint) and a **promise API**.
|
||||
|
||||
```sh
|
||||
npm install readdirp
|
||||
jsr add jsr:@paulmillr/readdirp
|
||||
```
|
||||
|
||||
```javascript
|
||||
// Use streams to achieve small RAM & CPU footprint.
|
||||
// 1) Streams example with for-await.
|
||||
import readdirp from 'readdirp';
|
||||
for await (const entry of readdirp('.')) {
|
||||
const {path} = entry;
|
||||
console.log(`${JSON.stringify({path})}`);
|
||||
}
|
||||
|
||||
// 2) Streams example, non for-await.
|
||||
// Print out all JS files along with their size within the current folder & subfolders.
|
||||
import readdirp from 'readdirp';
|
||||
readdirp('.', {alwaysStat: true, fileFilter: (f) => f.basename.endsWith('.js')})
|
||||
.on('data', (entry) => {
|
||||
const {path, stats: {size}} = entry;
|
||||
console.log(`${JSON.stringify({path, size})}`);
|
||||
})
|
||||
// Optionally call stream.destroy() in `warn()` in order to abort and cause 'close' to be emitted
|
||||
.on('warn', error => console.error('non-fatal error', error))
|
||||
.on('error', error => console.error('fatal error', error))
|
||||
.on('end', () => console.log('done'));
|
||||
|
||||
// 3) Promise example. More RAM and CPU than streams / for-await.
|
||||
import { readdirpPromise } from 'readdirp';
|
||||
const files = await readdirpPromise('.');
|
||||
console.log(files.map(file => file.path));
|
||||
|
||||
// Other options.
|
||||
import readdirp from 'readdirp';
|
||||
readdirp('test', {
|
||||
fileFilter: (f) => f.basename.endsWith('.js'),
|
||||
directoryFilter: (d) => d.basename !== '.git',
|
||||
// directoryFilter: (di) => di.basename.length === 9
|
||||
type: 'files_directories',
|
||||
depth: 1
|
||||
});
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
`const stream = readdirp(root[, options])` — **Stream API**
|
||||
|
||||
- Reads given root recursively and returns a `stream` of [entry infos](#entryinfo)
|
||||
- Optionally can be used like `for await (const entry of stream)` with node.js 10+ (`asyncIterator`).
|
||||
- `on('data', (entry) => {})` [entry info](#entryinfo) for every file / dir.
|
||||
- `on('warn', (error) => {})` non-fatal `Error` that prevents a file / dir from being processed. Example: inaccessible to the user.
|
||||
- `on('error', (error) => {})` fatal `Error` which also ends the stream. Example: illegal options were passed.
|
||||
- `on('end')` — we are done. Called when all entries were found and no more will be emitted.
|
||||
- `on('close')` — stream is destroyed via `stream.destroy()`.
|
||||
Can be useful if you want to manually abort even on a non-fatal error.
|
||||
At that point the stream is no longer `readable` and no more entries, warnings, or errors are emitted
|
||||
- To learn more about streams, consult the very detailed [nodejs streams documentation](https://nodejs.org/api/stream.html)
|
||||
or the [stream-handbook](https://github.com/substack/stream-handbook)
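
The `close` bullet above notes that you can abort manually via `stream.destroy()`; here is a minimal sketch of that pattern (the `.git` filter and the log messages are illustrative choices, not library defaults):

```javascript
// Sketch: abort the walk from the 'warn' handler on the first non-fatal error.
import readdirp from 'readdirp';

const stream = readdirp('.', { directoryFilter: (d) => d.basename !== '.git' });
stream
  .on('data', (entry) => console.log(entry.path))
  .on('warn', (error) => {
    console.error('non-fatal error, aborting:', error);
    stream.destroy(); // no more entries; 'close' will be emitted
  })
  .on('error', (error) => console.error('fatal error', error))
  .on('close', () => console.log('stream destroyed'))
  .on('end', () => console.log('done'));
```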
|
||||
|
||||
`const entries = await readdirpPromise(root[, options])` — **Promise API**. Returns a list of [entry infos](#entryinfo).
|
||||
|
||||
First argument is always `root`, the path in which to start reading and recursing into subdirectories.
|
||||
|
||||
### options
|
||||
|
||||
- `fileFilter`: filter to include or exclude files
|
||||
- **Function**: a function that takes an entry info as a parameter and returns true to include or false to exclude the entry
|
||||
- `directoryFilter`: filter to include/exclude directories found and to recurse into. Directories that do not pass a filter will not be recursed into.
|
||||
- `depth: 5`: depth at which to stop recursing even if more subdirectories are found
|
||||
- `type: 'files'`: determines if data events on the stream should be emitted for `'files'` (default), `'directories'`, `'files_directories'`, or `'all'`. Setting to `'all'` will also include entries for other types of file descriptors like character devices, unix sockets and named pipes.
|
||||
- `alwaysStat: false`: always return `stats` property for every file. Default is `false`, readdirp will return `Dirent` entries. Setting it to `true` can double readdir execution time - use it only when you need file `size`, `mtime` etc. Cannot be enabled on node <10.10.0.
|
||||
- `lstat: false`: include symlink entries in the stream along with files. When `true`, `fs.lstat` would be used instead of `fs.stat`
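
As a combined sketch of the options above (the `.map` suffix and the `build` directory are placeholder filters, not defaults):

```javascript
// Sketch: list only source maps, skip build output, stat every entry.
import readdirp from 'readdirp';

for await (const entry of readdirp('.', {
  fileFilter: (f) => f.basename.endsWith('.map'), // include files by predicate
  directoryFilter: (d) => d.basename !== 'build', // do not recurse into build/
  type: 'files',       // emit only files (the default)
  depth: 3,            // stop recursing three levels down
  alwaysStat: true,    // populate entry.stats instead of entry.dirent
  lstat: false,        // stat through symlinks rather than lstat-ing them
})) {
  console.log(entry.path, entry.stats.size);
}
```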
|
||||
|
||||
### `EntryInfo`
|
||||
|
||||
Has the following properties:
|
||||
|
||||
- `path: 'assets/javascripts/react.js'`: path to the file/directory (relative to given root)
|
||||
- `fullPath: '/Users/dev/projects/app/assets/javascripts/react.js'`: full path to the file/directory found
|
||||
- `basename: 'react.js'`: name of the file/directory
|
||||
- `dirent: fs.Dirent`: built-in [dir entry object](https://nodejs.org/api/fs.html#fs_class_fs_dirent) - only with `alwaysStat: false`
|
||||
- `stats: fs.Stats`: built in [stat object](https://nodejs.org/api/fs.html#fs_class_fs_stats) - only with `alwaysStat: true`
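
Which of the two optional properties is present depends on `alwaysStat`, so consumers typically branch on it; a small sketch:

```javascript
// Sketch: handle both shapes of EntryInfo, with and without alwaysStat.
import readdirp from 'readdirp';

for await (const entry of readdirp('.')) {
  if (entry.stats) {
    // alwaysStat: true — a full fs.Stats object is available
    console.log(entry.basename, entry.stats.size, 'bytes');
  } else if (entry.dirent) {
    // default — only the cheaper fs.Dirent is available
    console.log(entry.basename, entry.fullPath);
  }
}
```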
|
||||
|
||||
## Changelog
|
||||
|
||||
- 4.0 (Aug 25, 2024) rewritten in typescript, producing hybrid common.js / esm module.
|
||||
- Remove glob support and all dependencies
|
||||
- Make sure you're using `let {readdirp} = require('readdirp')` in common.js
|
||||
- 3.5 (Oct 13, 2020) disallows recursive directory-based symlinks.
|
||||
Before, it could have entered infinite loop.
|
||||
- 3.4 (Mar 19, 2020) adds support for directory-based symlinks.
|
||||
- 3.3 (Dec 6, 2019) stabilizes RAM consumption and enables perf management with `highWaterMark` option. Fixes race conditions related to `for-await` looping.
|
||||
- 3.2 (Oct 14, 2019) improves performance by 250% and makes streams implementation more idiomatic.
|
||||
- 3.1 (Jul 7, 2019) brings `bigint` support to `stat` output on Windows. This is backwards-incompatible for some cases. Be careful. If you use it incorrectly, you'll see "TypeError: Cannot mix BigInt and other types, use explicit conversions".
|
||||
- 3.0 brings huge performance improvements and stream backpressure support.
|
||||
- Upgrading 2.x to 3.x:
|
||||
- Signature changed from `readdirp(options)` to `readdirp(root, options)`
|
||||
- Replaced callback API with promise API.
|
||||
- Renamed `entryType` option to `type`
|
||||
- Renamed `entryType: 'both'` to `'files_directories'`
|
||||
- `EntryInfo`
|
||||
- Renamed `stat` to `stats`
|
||||
- Emitted only when `alwaysStat: true`
|
||||
- `dirent` is emitted instead of `stats` by default with `alwaysStat: false`
|
||||
- Renamed `name` to `basename`
|
||||
- Removed `parentDir` and `fullParentDir` properties
|
||||
- Supported node.js versions:
|
||||
- 4.x: node 14+
|
||||
- 3.x: node 8+
|
||||
- 2.x: node 0.6+
|
||||
|
||||
## License
|
||||
|
||||
Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller (<https://paulmillr.com>)
|
||||
|
||||
MIT License, see [LICENSE](LICENSE) file.
|
108
node_modules/readdirp/esm/index.d.ts
generated
vendored
Normal file
|
@@ -0,0 +1,108 @@
|
|||
/**
|
||||
* Recursive version of readdir. Exposes a streaming API and promise API.
|
||||
* Streaming API allows to use a small amount of RAM.
|
||||
*
|
||||
* @module
|
||||
* @example
|
||||
```js
|
||||
import readdirp from 'readdirp';
|
||||
for await (const entry of readdirp('.')) {
|
||||
const {path} = entry;
|
||||
console.log(`${JSON.stringify({path})}`);
|
||||
}
|
||||
```
|
||||
*/
|
||||
/*! readdirp - MIT License (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com) */
|
||||
import type { Stats, Dirent } from 'node:fs';
|
||||
import { Readable } from 'node:stream';
|
||||
/** Path in file system. */
|
||||
export type Path = string;
|
||||
/** Emitted entry. Contains relative & absolute path, basename, and either stats or dirent. */
|
||||
export interface EntryInfo {
|
||||
path: string;
|
||||
fullPath: string;
|
||||
stats?: Stats;
|
||||
dirent?: Dirent;
|
||||
basename: string;
|
||||
}
|
||||
/** Path or dir entries (files) */
|
||||
export type PathOrDirent = Dirent | Path;
|
||||
/** Filterer for files */
|
||||
export type Tester = (entryInfo: EntryInfo) => boolean;
|
||||
export type Predicate = string[] | string | Tester;
|
||||
export declare const EntryTypes: {
|
||||
readonly FILE_TYPE: "files";
|
||||
readonly DIR_TYPE: "directories";
|
||||
readonly FILE_DIR_TYPE: "files_directories";
|
||||
readonly EVERYTHING_TYPE: "all";
|
||||
};
|
||||
export type EntryType = (typeof EntryTypes)[keyof typeof EntryTypes];
|
||||
/**
|
||||
* Options for readdirp.
|
||||
* * type: files, directories, or both
|
||||
* * lstat: whether to use symlink-friendly stat
|
||||
* * depth: max depth
|
||||
* * alwaysStat: whether to use stat (more resources) or dirent
|
||||
* * highWaterMark: streaming param, specifies max amount of resources per entry
|
||||
*/
|
||||
export type ReaddirpOptions = {
|
||||
root: string;
|
||||
fileFilter?: Predicate;
|
||||
directoryFilter?: Predicate;
|
||||
type?: EntryType;
|
||||
lstat?: boolean;
|
||||
depth?: number;
|
||||
alwaysStat?: boolean;
|
||||
highWaterMark?: number;
|
||||
};
|
||||
/** Directory entry. Contains path, depth count, and files. */
|
||||
export interface DirEntry {
|
||||
files: PathOrDirent[];
|
||||
depth: number;
|
||||
path: Path;
|
||||
}
|
||||
/** Readable readdir stream, emitting new files as they're being listed. */
|
||||
export declare class ReaddirpStream extends Readable {
|
||||
parents: any[];
|
||||
reading: boolean;
|
||||
parent?: DirEntry;
|
||||
_stat: Function;
|
||||
_maxDepth: number;
|
||||
_wantsDir: boolean;
|
||||
_wantsFile: boolean;
|
||||
_wantsEverything: boolean;
|
||||
_root: Path;
|
||||
_isDirent: boolean;
|
||||
_statsProp: 'dirent' | 'stats';
|
||||
_rdOptions: {
|
||||
encoding: 'utf8';
|
||||
withFileTypes: boolean;
|
||||
};
|
||||
_fileFilter: Tester;
|
||||
_directoryFilter: Tester;
|
||||
constructor(options?: Partial<ReaddirpOptions>);
|
||||
_read(batch: number): Promise<void>;
|
||||
_exploreDir(path: Path, depth: number): Promise<{
|
||||
files: string[] | undefined;
|
||||
depth: number;
|
||||
path: string;
|
||||
}>;
|
||||
_formatEntry(dirent: PathOrDirent, path: Path): Promise<EntryInfo | undefined>;
|
||||
_onError(err: Error): void;
|
||||
_getEntryType(entry: EntryInfo): Promise<void | '' | 'file' | 'directory'>;
|
||||
_includeAsFile(entry: EntryInfo): boolean | undefined;
|
||||
}
|
||||
/**
|
||||
* Streaming version: Reads all files and directories in given root recursively.
|
||||
* Consumes ~constant small amount of RAM.
|
||||
* @param root Root directory
|
||||
* @param options Options to specify root (start directory), filters and recursion depth
|
||||
*/
|
||||
export declare function readdirp(root: Path, options?: Partial<ReaddirpOptions>): ReaddirpStream;
|
||||
/**
|
||||
* Promise version: Reads all files and directories in given root recursively.
|
||||
* Compared to streaming version, will consume a lot of RAM e.g. when 1 million files are listed.
|
||||
* @returns array of paths and their entry infos
|
||||
*/
|
||||
export declare function readdirpPromise(root: Path, options?: Partial<ReaddirpOptions>): Promise<EntryInfo[]>;
|
||||
export default readdirp;
|
257
node_modules/readdirp/esm/index.js
generated
vendored
Normal file
|
@@ -0,0 +1,257 @@
|
|||
import { stat, lstat, readdir, realpath } from 'node:fs/promises';
import { Readable } from 'node:stream';
import { resolve as presolve, relative as prelative, join as pjoin, sep as psep } from 'node:path';
export const EntryTypes = {
    FILE_TYPE: 'files',
    DIR_TYPE: 'directories',
    FILE_DIR_TYPE: 'files_directories',
    EVERYTHING_TYPE: 'all',
};
const defaultOptions = {
    root: '.',
    fileFilter: (_entryInfo) => true,
    directoryFilter: (_entryInfo) => true,
    type: EntryTypes.FILE_TYPE,
    lstat: false,
    depth: 2147483648,
    alwaysStat: false,
    highWaterMark: 4096,
};
Object.freeze(defaultOptions);
const RECURSIVE_ERROR_CODE = 'READDIRP_RECURSIVE_ERROR';
const NORMAL_FLOW_ERRORS = new Set(['ENOENT', 'EPERM', 'EACCES', 'ELOOP', RECURSIVE_ERROR_CODE]);
const ALL_TYPES = [
    EntryTypes.DIR_TYPE,
    EntryTypes.EVERYTHING_TYPE,
    EntryTypes.FILE_DIR_TYPE,
    EntryTypes.FILE_TYPE,
];
const DIR_TYPES = new Set([
    EntryTypes.DIR_TYPE,
    EntryTypes.EVERYTHING_TYPE,
    EntryTypes.FILE_DIR_TYPE,
]);
const FILE_TYPES = new Set([
    EntryTypes.EVERYTHING_TYPE,
    EntryTypes.FILE_DIR_TYPE,
    EntryTypes.FILE_TYPE,
]);
const isNormalFlowError = (error) => NORMAL_FLOW_ERRORS.has(error.code);
const wantBigintFsStats = process.platform === 'win32';
const emptyFn = (_entryInfo) => true;
const normalizeFilter = (filter) => {
    if (filter === undefined)
        return emptyFn;
    if (typeof filter === 'function')
        return filter;
    if (typeof filter === 'string') {
        const fl = filter.trim();
        return (entry) => entry.basename === fl;
    }
    if (Array.isArray(filter)) {
        const trItems = filter.map((item) => item.trim());
        return (entry) => trItems.some((f) => entry.basename === f);
    }
    return emptyFn;
};
/** Readable readdir stream, emitting new files as they're being listed. */
export class ReaddirpStream extends Readable {
    constructor(options = {}) {
        super({
            objectMode: true,
            autoDestroy: true,
            highWaterMark: options.highWaterMark,
        });
        const opts = { ...defaultOptions, ...options };
        const { root, type } = opts;
        this._fileFilter = normalizeFilter(opts.fileFilter);
        this._directoryFilter = normalizeFilter(opts.directoryFilter);
        const statMethod = opts.lstat ? lstat : stat;
        // Use bigint stats if it's windows and stat() supports options (node 10+).
        if (wantBigintFsStats) {
            this._stat = (path) => statMethod(path, { bigint: true });
        }
        else {
            this._stat = statMethod;
        }
        this._maxDepth = opts.depth ?? defaultOptions.depth;
        this._wantsDir = type ? DIR_TYPES.has(type) : false;
        this._wantsFile = type ? FILE_TYPES.has(type) : false;
        this._wantsEverything = type === EntryTypes.EVERYTHING_TYPE;
        this._root = presolve(root);
        this._isDirent = !opts.alwaysStat;
        this._statsProp = this._isDirent ? 'dirent' : 'stats';
        this._rdOptions = { encoding: 'utf8', withFileTypes: this._isDirent };
        // Launch stream with one parent, the root dir.
        this.parents = [this._exploreDir(root, 1)];
        this.reading = false;
        this.parent = undefined;
    }
    async _read(batch) {
        if (this.reading)
            return;
        this.reading = true;
        try {
            while (!this.destroyed && batch > 0) {
                const par = this.parent;
                const fil = par && par.files;
                if (fil && fil.length > 0) {
                    const { path, depth } = par;
                    const slice = fil.splice(0, batch).map((dirent) => this._formatEntry(dirent, path));
                    const awaited = await Promise.all(slice);
                    for (const entry of awaited) {
                        if (!entry)
                            continue;
                        if (this.destroyed)
                            return;
                        const entryType = await this._getEntryType(entry);
                        if (entryType === 'directory' && this._directoryFilter(entry)) {
                            if (depth <= this._maxDepth) {
                                this.parents.push(this._exploreDir(entry.fullPath, depth + 1));
                            }
                            if (this._wantsDir) {
                                this.push(entry);
                                batch--;
                            }
                        }
                        else if ((entryType === 'file' || this._includeAsFile(entry)) &&
                            this._fileFilter(entry)) {
                            if (this._wantsFile) {
                                this.push(entry);
                                batch--;
                            }
                        }
                    }
                }
                else {
                    const parent = this.parents.pop();
                    if (!parent) {
                        this.push(null);
                        break;
                    }
                    this.parent = await parent;
                    if (this.destroyed)
                        return;
                }
            }
        }
        catch (error) {
            this.destroy(error);
        }
        finally {
            this.reading = false;
        }
    }
    async _exploreDir(path, depth) {
        let files;
        try {
            files = await readdir(path, this._rdOptions);
        }
        catch (error) {
            this._onError(error);
        }
        return { files, depth, path };
    }
    async _formatEntry(dirent, path) {
        let entry;
        const basename = this._isDirent ? dirent.name : dirent;
        try {
            const fullPath = presolve(pjoin(path, basename));
            entry = { path: prelative(this._root, fullPath), fullPath, basename };
            entry[this._statsProp] = this._isDirent ? dirent : await this._stat(fullPath);
        }
        catch (err) {
            this._onError(err);
            return;
        }
        return entry;
    }
    _onError(err) {
        if (isNormalFlowError(err) && !this.destroyed) {
            this.emit('warn', err);
        }
        else {
            this.destroy(err);
        }
    }
    async _getEntryType(entry) {
        // entry may be undefined, because a warning or an error were emitted
        // and the statsProp is undefined
        if (!entry && this._statsProp in entry) {
            return '';
        }
        const stats = entry[this._statsProp];
        if (stats.isFile())
            return 'file';
        if (stats.isDirectory())
            return 'directory';
        if (stats && stats.isSymbolicLink()) {
            const full = entry.fullPath;
            try {
                const entryRealPath = await realpath(full);
                const entryRealPathStats = await lstat(entryRealPath);
                if (entryRealPathStats.isFile()) {
                    return 'file';
                }
                if (entryRealPathStats.isDirectory()) {
                    const len = entryRealPath.length;
                    if (full.startsWith(entryRealPath) && full.substr(len, 1) === psep) {
                        const recursiveError = new Error(`Circular symlink detected: "${full}" points to "${entryRealPath}"`);
                        // @ts-ignore
                        recursiveError.code = RECURSIVE_ERROR_CODE;
                        return this._onError(recursiveError);
                    }
                    return 'directory';
                }
            }
            catch (error) {
                this._onError(error);
                return '';
            }
        }
    }
    _includeAsFile(entry) {
        const stats = entry && entry[this._statsProp];
        return stats && this._wantsEverything && !stats.isDirectory();
    }
}
/**
 * Streaming version: Reads all files and directories in given root recursively.
 * Consumes ~constant small amount of RAM.
 * @param root Root directory
 * @param options Options to specify root (start directory), filters and recursion depth
 */
export function readdirp(root, options = {}) {
    // @ts-ignore
    let type = options.entryType || options.type;
    if (type === 'both')
        type = EntryTypes.FILE_DIR_TYPE; // backwards-compatibility
    if (type)
        options.type = type;
    if (!root) {
        throw new Error('readdirp: root argument is required. Usage: readdirp(root, options)');
    }
    else if (typeof root !== 'string') {
        throw new TypeError('readdirp: root argument must be a string. Usage: readdirp(root, options)');
    }
    else if (type && !ALL_TYPES.includes(type)) {
        throw new Error(`readdirp: Invalid type passed. Use one of ${ALL_TYPES.join(', ')}`);
    }
    options.root = root;
    return new ReaddirpStream(options);
}
/**
 * Promise version: Reads all files and directories in given root recursively.
 * Compared to streaming version, will consume a lot of RAM e.g. when 1 million files are listed.
 * @returns array of paths and their entry infos
 */
export function readdirpPromise(root, options = {}) {
    return new Promise((resolve, reject) => {
        const files = [];
        readdirp(root, options)
            .on('data', (entry) => files.push(entry))
            .on('end', () => resolve(files))
            .on('error', (error) => reject(error));
    });
}
export default readdirp;
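In the implementation above, `_onError` keeps the walk alive for recoverable conditions (`ENOENT`, `EPERM`, `EACCES`, `ELOOP`, and the circular-symlink code) by emitting `'warn'`, and destroys the stream for anything else. A sketch of consuming the stream with that distinction in mind; the root path is illustrative and `alwaysStat: true` is chosen only to show `entry.stats`:

```js
import readdirp from 'readdirp';

const stream = readdirp('/var/log', { type: 'files_directories', alwaysStat: true });

stream
  // Recoverable problems (missing files, permission errors, circular symlinks)
  // arrive as 'warn' events and do not stop the traversal.
  .on('warn', (err) => console.warn('skipped:', err.code, err.message))
  // Anything else destroys the stream and surfaces here.
  .on('error', (err) => console.error('fatal:', err))
  // With alwaysStat: true each entry carries entry.stats; by default it carries entry.dirent.
  .on('data', (entry) => console.log(entry.path, entry.stats?.size))
  .on('end', () => console.log('walk finished'));
```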
1
node_modules/readdirp/esm/package.json
generated
vendored
Normal file
1
node_modules/readdirp/esm/package.json
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
{ "type": "module", "sideEffects": false }
108
node_modules/readdirp/index.d.ts
generated
vendored
Normal file
108
node_modules/readdirp/index.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,108 @@
|
|||
/**
 * Recursive version of readdir. Exposes a streaming API and promise API.
 * Streaming API allows to use a small amount of RAM.
 *
 * @module
 * @example
```js
import readdirp from 'readdirp';
for await (const entry of readdirp('.')) {
  const {path} = entry;
  console.log(`${JSON.stringify({path})}`);
}
```
 */
/*! readdirp - MIT License (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com) */
import type { Stats, Dirent } from 'node:fs';
import { Readable } from 'node:stream';
/** Path in file system. */
export type Path = string;
/** Emitted entry. Contains relative & absolute path, basename, and either stats or dirent. */
export interface EntryInfo {
    path: string;
    fullPath: string;
    stats?: Stats;
    dirent?: Dirent;
    basename: string;
}
/** Path or dir entries (files) */
export type PathOrDirent = Dirent | Path;
/** Filterer for files */
export type Tester = (entryInfo: EntryInfo) => boolean;
export type Predicate = string[] | string | Tester;
export declare const EntryTypes: {
    readonly FILE_TYPE: "files";
    readonly DIR_TYPE: "directories";
    readonly FILE_DIR_TYPE: "files_directories";
    readonly EVERYTHING_TYPE: "all";
};
export type EntryType = (typeof EntryTypes)[keyof typeof EntryTypes];
/**
 * Options for readdirp.
 * * type: files, directories, or both
 * * lstat: whether to use symlink-friendly stat
 * * depth: max depth
 * * alwaysStat: whether to use stat (more resources) or dirent
 * * highWaterMark: streaming param, specifies max amount of resources per entry
 */
export type ReaddirpOptions = {
    root: string;
    fileFilter?: Predicate;
    directoryFilter?: Predicate;
    type?: EntryType;
    lstat?: boolean;
    depth?: number;
    alwaysStat?: boolean;
    highWaterMark?: number;
};
/** Directory entry. Contains path, depth count, and files. */
export interface DirEntry {
    files: PathOrDirent[];
    depth: number;
    path: Path;
}
/** Readable readdir stream, emitting new files as they're being listed. */
export declare class ReaddirpStream extends Readable {
    parents: any[];
    reading: boolean;
    parent?: DirEntry;
    _stat: Function;
    _maxDepth: number;
    _wantsDir: boolean;
    _wantsFile: boolean;
    _wantsEverything: boolean;
    _root: Path;
    _isDirent: boolean;
    _statsProp: 'dirent' | 'stats';
    _rdOptions: {
        encoding: 'utf8';
        withFileTypes: boolean;
    };
    _fileFilter: Tester;
    _directoryFilter: Tester;
    constructor(options?: Partial<ReaddirpOptions>);
    _read(batch: number): Promise<void>;
    _exploreDir(path: Path, depth: number): Promise<{
        files: string[] | undefined;
        depth: number;
        path: string;
    }>;
    _formatEntry(dirent: PathOrDirent, path: Path): Promise<EntryInfo | undefined>;
    _onError(err: Error): void;
    _getEntryType(entry: EntryInfo): Promise<void | '' | 'file' | 'directory'>;
    _includeAsFile(entry: EntryInfo): boolean | undefined;
}
/**
 * Streaming version: Reads all files and directories in given root recursively.
 * Consumes ~constant small amount of RAM.
 * @param root Root directory
 * @param options Options to specify root (start directory), filters and recursion depth
 */
export declare function readdirp(root: Path, options?: Partial<ReaddirpOptions>): ReaddirpStream;
/**
 * Promise version: Reads all files and directories in given root recursively.
 * Compared to streaming version, will consume a lot of RAM e.g. when 1 million files are listed.
 * @returns array of paths and their entry infos
 */
export declare function readdirpPromise(root: Path, options?: Partial<ReaddirpOptions>): Promise<EntryInfo[]>;
export default readdirp;
263
node_modules/readdirp/index.js
generated
vendored
Normal file
263
node_modules/readdirp/index.js
generated
vendored
Normal file
|
@ -0,0 +1,263 @@
|
|||
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ReaddirpStream = exports.EntryTypes = void 0;
exports.readdirp = readdirp;
exports.readdirpPromise = readdirpPromise;
const promises_1 = require("node:fs/promises");
const node_stream_1 = require("node:stream");
const node_path_1 = require("node:path");
exports.EntryTypes = {
    FILE_TYPE: 'files',
    DIR_TYPE: 'directories',
    FILE_DIR_TYPE: 'files_directories',
    EVERYTHING_TYPE: 'all',
};
const defaultOptions = {
    root: '.',
    fileFilter: (_entryInfo) => true,
    directoryFilter: (_entryInfo) => true,
    type: exports.EntryTypes.FILE_TYPE,
    lstat: false,
    depth: 2147483648,
    alwaysStat: false,
    highWaterMark: 4096,
};
Object.freeze(defaultOptions);
const RECURSIVE_ERROR_CODE = 'READDIRP_RECURSIVE_ERROR';
const NORMAL_FLOW_ERRORS = new Set(['ENOENT', 'EPERM', 'EACCES', 'ELOOP', RECURSIVE_ERROR_CODE]);
const ALL_TYPES = [
    exports.EntryTypes.DIR_TYPE,
    exports.EntryTypes.EVERYTHING_TYPE,
    exports.EntryTypes.FILE_DIR_TYPE,
    exports.EntryTypes.FILE_TYPE,
];
const DIR_TYPES = new Set([
    exports.EntryTypes.DIR_TYPE,
    exports.EntryTypes.EVERYTHING_TYPE,
    exports.EntryTypes.FILE_DIR_TYPE,
]);
const FILE_TYPES = new Set([
    exports.EntryTypes.EVERYTHING_TYPE,
    exports.EntryTypes.FILE_DIR_TYPE,
    exports.EntryTypes.FILE_TYPE,
]);
const isNormalFlowError = (error) => NORMAL_FLOW_ERRORS.has(error.code);
const wantBigintFsStats = process.platform === 'win32';
const emptyFn = (_entryInfo) => true;
const normalizeFilter = (filter) => {
    if (filter === undefined)
        return emptyFn;
    if (typeof filter === 'function')
        return filter;
    if (typeof filter === 'string') {
        const fl = filter.trim();
        return (entry) => entry.basename === fl;
    }
    if (Array.isArray(filter)) {
        const trItems = filter.map((item) => item.trim());
        return (entry) => trItems.some((f) => entry.basename === f);
    }
    return emptyFn;
};
/** Readable readdir stream, emitting new files as they're being listed. */
class ReaddirpStream extends node_stream_1.Readable {
    constructor(options = {}) {
        super({
            objectMode: true,
            autoDestroy: true,
            highWaterMark: options.highWaterMark,
        });
        const opts = { ...defaultOptions, ...options };
        const { root, type } = opts;
        this._fileFilter = normalizeFilter(opts.fileFilter);
        this._directoryFilter = normalizeFilter(opts.directoryFilter);
        const statMethod = opts.lstat ? promises_1.lstat : promises_1.stat;
        // Use bigint stats if it's windows and stat() supports options (node 10+).
        if (wantBigintFsStats) {
            this._stat = (path) => statMethod(path, { bigint: true });
        }
        else {
            this._stat = statMethod;
        }
        this._maxDepth = opts.depth ?? defaultOptions.depth;
        this._wantsDir = type ? DIR_TYPES.has(type) : false;
        this._wantsFile = type ? FILE_TYPES.has(type) : false;
        this._wantsEverything = type === exports.EntryTypes.EVERYTHING_TYPE;
        this._root = (0, node_path_1.resolve)(root);
        this._isDirent = !opts.alwaysStat;
        this._statsProp = this._isDirent ? 'dirent' : 'stats';
        this._rdOptions = { encoding: 'utf8', withFileTypes: this._isDirent };
        // Launch stream with one parent, the root dir.
        this.parents = [this._exploreDir(root, 1)];
        this.reading = false;
        this.parent = undefined;
    }
    async _read(batch) {
        if (this.reading)
            return;
        this.reading = true;
        try {
            while (!this.destroyed && batch > 0) {
                const par = this.parent;
                const fil = par && par.files;
                if (fil && fil.length > 0) {
                    const { path, depth } = par;
                    const slice = fil.splice(0, batch).map((dirent) => this._formatEntry(dirent, path));
                    const awaited = await Promise.all(slice);
                    for (const entry of awaited) {
                        if (!entry)
                            continue;
                        if (this.destroyed)
                            return;
                        const entryType = await this._getEntryType(entry);
                        if (entryType === 'directory' && this._directoryFilter(entry)) {
                            if (depth <= this._maxDepth) {
                                this.parents.push(this._exploreDir(entry.fullPath, depth + 1));
                            }
                            if (this._wantsDir) {
                                this.push(entry);
                                batch--;
                            }
                        }
                        else if ((entryType === 'file' || this._includeAsFile(entry)) &&
                            this._fileFilter(entry)) {
                            if (this._wantsFile) {
                                this.push(entry);
                                batch--;
                            }
                        }
                    }
                }
                else {
                    const parent = this.parents.pop();
                    if (!parent) {
                        this.push(null);
                        break;
                    }
                    this.parent = await parent;
                    if (this.destroyed)
                        return;
                }
            }
        }
        catch (error) {
            this.destroy(error);
        }
        finally {
            this.reading = false;
        }
    }
    async _exploreDir(path, depth) {
        let files;
        try {
            files = await (0, promises_1.readdir)(path, this._rdOptions);
        }
        catch (error) {
            this._onError(error);
        }
        return { files, depth, path };
    }
    async _formatEntry(dirent, path) {
        let entry;
        const basename = this._isDirent ? dirent.name : dirent;
        try {
            const fullPath = (0, node_path_1.resolve)((0, node_path_1.join)(path, basename));
            entry = { path: (0, node_path_1.relative)(this._root, fullPath), fullPath, basename };
            entry[this._statsProp] = this._isDirent ? dirent : await this._stat(fullPath);
        }
        catch (err) {
            this._onError(err);
            return;
        }
        return entry;
    }
    _onError(err) {
        if (isNormalFlowError(err) && !this.destroyed) {
            this.emit('warn', err);
        }
        else {
            this.destroy(err);
        }
    }
    async _getEntryType(entry) {
        // entry may be undefined, because a warning or an error were emitted
        // and the statsProp is undefined
        if (!entry && this._statsProp in entry) {
            return '';
        }
        const stats = entry[this._statsProp];
        if (stats.isFile())
            return 'file';
        if (stats.isDirectory())
            return 'directory';
        if (stats && stats.isSymbolicLink()) {
            const full = entry.fullPath;
            try {
                const entryRealPath = await (0, promises_1.realpath)(full);
                const entryRealPathStats = await (0, promises_1.lstat)(entryRealPath);
                if (entryRealPathStats.isFile()) {
                    return 'file';
                }
                if (entryRealPathStats.isDirectory()) {
                    const len = entryRealPath.length;
                    if (full.startsWith(entryRealPath) && full.substr(len, 1) === node_path_1.sep) {
                        const recursiveError = new Error(`Circular symlink detected: "${full}" points to "${entryRealPath}"`);
                        // @ts-ignore
                        recursiveError.code = RECURSIVE_ERROR_CODE;
                        return this._onError(recursiveError);
                    }
                    return 'directory';
                }
            }
            catch (error) {
                this._onError(error);
                return '';
            }
        }
    }
    _includeAsFile(entry) {
        const stats = entry && entry[this._statsProp];
        return stats && this._wantsEverything && !stats.isDirectory();
    }
}
exports.ReaddirpStream = ReaddirpStream;
/**
 * Streaming version: Reads all files and directories in given root recursively.
 * Consumes ~constant small amount of RAM.
 * @param root Root directory
 * @param options Options to specify root (start directory), filters and recursion depth
 */
function readdirp(root, options = {}) {
    // @ts-ignore
    let type = options.entryType || options.type;
    if (type === 'both')
        type = exports.EntryTypes.FILE_DIR_TYPE; // backwards-compatibility
    if (type)
        options.type = type;
    if (!root) {
        throw new Error('readdirp: root argument is required. Usage: readdirp(root, options)');
    }
    else if (typeof root !== 'string') {
        throw new TypeError('readdirp: root argument must be a string. Usage: readdirp(root, options)');
    }
    else if (type && !ALL_TYPES.includes(type)) {
        throw new Error(`readdirp: Invalid type passed. Use one of ${ALL_TYPES.join(', ')}`);
    }
    options.root = root;
    return new ReaddirpStream(options);
}
/**
 * Promise version: Reads all files and directories in given root recursively.
 * Compared to streaming version, will consume a lot of RAM e.g. when 1 million files are listed.
 * @returns array of paths and their entry infos
 */
function readdirpPromise(root, options = {}) {
    return new Promise((resolve, reject) => {
        const files = [];
        readdirp(root, options)
            .on('data', (entry) => files.push(entry))
            .on('end', () => resolve(files))
            .on('error', (error) => reject(error));
    });
}
exports.default = readdirp;
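This file is the CommonJS build of the same module; the `exports` map in the package.json below routes `require('readdirp')` here and `import` to `esm/index.js`. A small sketch of consuming it from CommonJS code, assuming the same API as the ESM build; the root and filter are illustrative:

```js
// CommonJS consumers get the same named exports from this file.
const { readdirpPromise } = require('readdirp');

readdirpPromise('.', {
  type: 'directories',
  directoryFilter: (entry) => entry.basename !== '.git',
})
  .then((dirs) => console.log(`${dirs.length} directories found`))
  .catch((err) => console.error(err));
```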
70
node_modules/readdirp/package.json
generated
vendored
Normal file
70
node_modules/readdirp/package.json
generated
vendored
Normal file
|
@ -0,0 +1,70 @@
|
|||
{
  "name": "readdirp",
  "description": "Recursive version of fs.readdir with small RAM & CPU footprint.",
  "version": "4.1.2",
  "homepage": "https://github.com/paulmillr/readdirp",
  "repository": {
    "type": "git",
    "url": "git://github.com/paulmillr/readdirp.git"
  },
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/paulmillr/readdirp/issues"
  },
  "author": "Thorsten Lorenz <thlorenz@gmx.de> (thlorenz.com)",
  "contributors": [
    "Thorsten Lorenz <thlorenz@gmx.de> (thlorenz.com)",
    "Paul Miller (https://paulmillr.com)"
  ],
  "engines": {
    "node": ">= 14.18.0"
  },
  "files": [
    "index.js",
    "index.d.ts",
    "index.d.ts.map",
    "index.js.map",
    "esm"
  ],
  "main": "./index.js",
  "module": "./esm/index.js",
  "types": "./index.d.ts",
  "exports": {
    ".": {
      "import": "./esm/index.js",
      "require": "./index.js"
    }
  },
  "sideEffects": false,
  "keywords": [
    "recursive",
    "fs",
    "stream",
    "streams",
    "readdir",
    "filesystem",
    "find",
    "filter"
  ],
  "scripts": {
    "build": "tsc && tsc -p tsconfig.cjs.json",
    "lint": "prettier --check index.ts test/index.test.js",
    "format": "prettier --write index.ts test/index.test.js",
    "test": "node test/index.test.js",
    "test:coverage": "c8 node test/index.test.js"
  },
  "devDependencies": {
    "@paulmillr/jsbt": "0.3.1",
    "@types/node": "20.14.8",
    "c8": "10.1.3",
    "chai": "4.3.4",
    "chai-subset": "1.6.0",
    "micro-should": "0.5.0",
    "prettier": "3.1.1",
    "typescript": "5.5.2"
  },
  "funding": {
    "type": "individual",
    "url": "https://paulmillr.com/funding/"
  }
}
5
package.json
Normal file
5
package.json
Normal file
|
@ -0,0 +1,5 @@
|
|||
{
  "dependencies": {
    "chokidar": "^4.0.3"
  }
}