Initial commit
commit 034af8e775
2 README.md Normal file
@@ -0,0 +1,2 @@
My portfolio website.
<a href="https://f1rq.ovh" target="_blank">f1rq.ovh</a>
12 jsconfig.json Normal file
@@ -0,0 +1,12 @@
{
	"extends": "./.svelte-kit/tsconfig.json",
	"compilerOptions": {
		"esModuleInterop": true,
		"forceConsistentCasingInFileNames": true,
		"resolveJsonModule": true,
		"skipLibCheck": true,
		"sourceMap": true,
		"module": "NodeNext",
		"moduleResolution": "NodeNext"
	}
}
32 node_modules/.svelte2tsx-language-server-files/svelte-native-jsx.d.ts generated vendored Normal file
@@ -0,0 +1,32 @@
declare namespace svelteNative.JSX {

    // Every namespace eligible for use needs to implement the following two functions
    function mapElementTag(
        tag: string
    ): any;

    function createElement<Elements extends IntrinsicElements, Key extends keyof Elements>(
        element: Key | undefined | null, attrs: Elements[Key]
    ): any;
    function createElement<Elements extends IntrinsicElements, Key extends keyof Elements, T>(
        element: Key | undefined | null, attrEnhancers: T, attrs: Elements[Key] & T
    ): any;


    /* svelte specific */
    interface ElementClass {
        $$prop_def: any;
    }

    interface ElementAttributesProperty {
        $$prop_def: any; // specify the property name to use
    }

    // Add empty IntrinsicAttributes to prevent fallback to the one in the JSX namespace
    interface IntrinsicAttributes {
    }

    interface IntrinsicElements {
        [name: string]: { [name: string]: any };
    }
}
287 node_modules/.svelte2tsx-language-server-files/svelte-shims-v4.d.ts generated vendored Normal file
@@ -0,0 +1,287 @@
|
||||
// Whenever a ambient declaration changes, its number should be increased
|
||||
// This way, we avoid the situation where multiple ambient versions of svelte2tsx
|
||||
// are loaded and their declarations conflict each other
|
||||
// See https://github.com/sveltejs/language-tools/issues/1059 for an example bug that stems from it
|
||||
// If you change anything in this file, think about whether or not it should be backported to svelte-shims.d.ts
|
||||
|
||||
type AConstructorTypeOf<T, U extends any[] = any[]> = new (...args: U) => T;
|
||||
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type SvelteActionReturnType = {
|
||||
update?: (args: any) => void,
|
||||
destroy?: () => void
|
||||
} | void
|
||||
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type SvelteTransitionConfig = {
|
||||
delay?: number,
|
||||
duration?: number,
|
||||
easing?: (t: number) => number,
|
||||
css?: (t: number, u: number) => string,
|
||||
tick?: (t: number, u: number) => void
|
||||
}
|
||||
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type SvelteTransitionReturnType = SvelteTransitionConfig | (() => SvelteTransitionConfig)
|
||||
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type SvelteAnimationReturnType = {
|
||||
delay?: number,
|
||||
duration?: number,
|
||||
easing?: (t: number) => number,
|
||||
css?: (t: number, u: number) => string,
|
||||
tick?: (t: number, u: number) => void
|
||||
}
|
||||
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type SvelteWithOptionalProps<Props, Keys extends keyof Props> = Omit<Props, Keys> & Partial<Pick<Props, Keys>>;
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type SvelteAllProps = { [index: string]: any }
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type SveltePropsAnyFallback<Props> = {[K in keyof Props]: Props[K] extends never ? never : Props[K] extends undefined ? any : Props[K]}
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type SvelteSlotsAnyFallback<Slots> = {[K in keyof Slots]: {[S in keyof Slots[K]]: Slots[K][S] extends undefined ? any : Slots[K][S]}}
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type SvelteRestProps = { [index: string]: any }
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type SvelteSlots = { [index: string]: any }
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type SvelteStore<T> = { subscribe: (run: (value: T) => any, invalidate?: any) => any }
|
||||
|
||||
// Forces TypeScript to look into the type which results in a better representation of it
|
||||
// which helps for error messages and is necessary for d.ts file transformation so that
|
||||
// no ambient type references are left in the output
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type Expand<T> = T extends infer O ? { [K in keyof O]: O[K] } : never;
|
||||
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type KeysMatching<Obj, V> = {[K in keyof Obj]-?: Obj[K] extends V ? K : never}[keyof Obj]
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
declare type __sveltets_2_CustomEvents<T> = {[K in KeysMatching<T, CustomEvent>]: T[K] extends CustomEvent ? T[K]['detail']: T[K]}
|
||||
|
||||
declare function __sveltets_2_ensureRightProps<Props>(props: Props): {};
|
||||
declare function __sveltets_2_instanceOf<T = any>(type: AConstructorTypeOf<T>): T;
|
||||
declare function __sveltets_2_allPropsType(): SvelteAllProps
|
||||
declare function __sveltets_2_restPropsType(): SvelteRestProps
|
||||
declare function __sveltets_2_slotsType<Slots, Key extends keyof Slots>(slots: Slots): Record<Key, boolean>;
|
||||
|
||||
// Overload of the following two functions is necessary.
|
||||
// An empty array of optionalProps makes OptionalProps type any, which means we lose the prop typing.
|
||||
// optionalProps need to be first or its type cannot be infered correctly.
|
||||
|
||||
declare function __sveltets_2_partial<Props = {}, Events = {}, Slots = {}, Exports = {}, Bindings = string>(
|
||||
render: {props: Props, events: Events, slots: Slots, exports?: Exports, bindings?: Bindings }
|
||||
): {props: Expand<SveltePropsAnyFallback<Props>>, events: Events, slots: Expand<SvelteSlotsAnyFallback<Slots>>, exports?: Exports, bindings?: Bindings }
|
||||
declare function __sveltets_2_partial<Props = {}, Events = {}, Slots = {}, Exports = {}, Bindings = string, OptionalProps extends keyof Props = any>(
|
||||
optionalProps: OptionalProps[],
|
||||
render: {props: Props, events: Events, slots: Slots, exports?: Exports, bindings?: Bindings }
|
||||
): {props: Expand<SvelteWithOptionalProps<SveltePropsAnyFallback<Props>, OptionalProps>>, events: Events, slots: Expand<SvelteSlotsAnyFallback<Slots>>, exports?: Exports, bindings?: Bindings }
|
||||
|
||||
declare function __sveltets_2_partial_with_any<Props = {}, Events = {}, Slots = {}, Exports = {}, Bindings = string>(
|
||||
render: {props: Props, events: Events, slots: Slots, exports?: Exports, bindings?: Bindings }
|
||||
): {props: Expand<SveltePropsAnyFallback<Props> & SvelteAllProps>, events: Events, slots: Expand<SvelteSlotsAnyFallback<Slots>>, exports?: Exports, bindings?: Bindings }
|
||||
declare function __sveltets_2_partial_with_any<Props = {}, Events = {}, Slots = {}, Exports = {}, Bindings = string, OptionalProps extends keyof Props = any>(
|
||||
optionalProps: OptionalProps[],
|
||||
render: {props: Props, events: Events, slots: Slots, exports?: Exports, bindings?: Bindings }
|
||||
): {props: Expand<SvelteWithOptionalProps<SveltePropsAnyFallback<Props>, OptionalProps> & SvelteAllProps>, events: Events, slots: Expand<SvelteSlotsAnyFallback<Slots>>, exports?: Exports, bindings?: Bindings }
|
||||
|
||||
|
||||
declare function __sveltets_2_with_any<Props = {}, Events = {}, Slots = {}, Exports = {}, Bindings = string>(
|
||||
render: {props: Props, events: Events, slots: Slots, exports?: Exports, bindings?: Bindings }
|
||||
): {props: Expand<Props & SvelteAllProps>, events: Events, slots: Slots, exports?: Exports, bindings?: Bindings }
|
||||
|
||||
declare function __sveltets_2_with_any_event<Props = {}, Events = {}, Slots = {}, Exports = {}, Bindings = string>(
|
||||
render: {props: Props, events: Events, slots: Slots, exports?: Exports, bindings?: Bindings }
|
||||
): {props: Props, events: Events & {[evt: string]: CustomEvent<any>;}, slots: Slots, exports?: Exports, bindings?: Bindings }
|
||||
|
||||
declare function __sveltets_2_store_get<T = any>(store: SvelteStore<T>): T
|
||||
declare function __sveltets_2_store_get<Store extends SvelteStore<any> | undefined | null>(store: Store): Store extends SvelteStore<infer T> ? T : Store;
|
||||
declare function __sveltets_2_any(dummy: any): any;
|
||||
declare function __sveltets_2_invalidate<T>(getValue: () => T): T
|
||||
|
||||
declare function __sveltets_2_mapWindowEvent<K extends keyof HTMLBodyElementEventMap>(
|
||||
event: K
|
||||
): HTMLBodyElementEventMap[K];
|
||||
declare function __sveltets_2_mapBodyEvent<K extends keyof WindowEventMap>(
|
||||
event: K
|
||||
): WindowEventMap[K];
|
||||
declare function __sveltets_2_mapElementEvent<K extends keyof HTMLElementEventMap>(
|
||||
event: K
|
||||
): HTMLElementEventMap[K];
|
||||
|
||||
declare function __sveltets_2_bubbleEventDef<Events, K extends keyof Events>(
|
||||
events: Events, eventKey: K
|
||||
): Events[K];
|
||||
declare function __sveltets_2_bubbleEventDef(
|
||||
events: any, eventKey: string
|
||||
): any;
|
||||
|
||||
declare const __sveltets_2_customEvent: CustomEvent<any>;
|
||||
declare function __sveltets_2_toEventTypings<Typings>(): {[Key in keyof Typings]: CustomEvent<Typings[Key]>};
|
||||
|
||||
declare function __sveltets_2_unionType<T1, T2>(t1: T1, t2: T2): T1 | T2;
|
||||
declare function __sveltets_2_unionType<T1, T2, T3>(t1: T1, t2: T2, t3: T3): T1 | T2 | T3;
|
||||
declare function __sveltets_2_unionType<T1, T2, T3, T4>(t1: T1, t2: T2, t3: T3, t4: T4): T1 | T2 | T3 | T4;
|
||||
declare function __sveltets_2_unionType(...types: any[]): any;
|
||||
|
||||
declare function __sveltets_2_createSvelte2TsxComponent<Props extends {}, Events extends {}, Slots extends {}>(
|
||||
render: {props: Props, events: Events, slots: Slots }
|
||||
): typeof import("svelte").SvelteComponent<Props, Events, Slots>;
|
||||
|
||||
declare function __sveltets_2_unwrapArr<T>(arr: ArrayLike<T>): T
|
||||
declare function __sveltets_2_unwrapPromiseLike<T>(promise: PromiseLike<T> | T): T
|
||||
|
||||
// v2
|
||||
declare function __sveltets_2_createCreateSlot<Slots = Record<string, Record<string, any>>>(): <SlotName extends keyof Slots>(slotName: SlotName, attrs: Slots[SlotName]) => Record<string, any>;
|
||||
declare function __sveltets_2_createComponentAny(props: Record<string, any>): import("svelte").SvelteComponent<any, any, any>;
|
||||
|
||||
declare function __sveltets_2_any(...dummy: any[]): any;
|
||||
declare function __sveltets_2_empty(...dummy: any[]): {};
|
||||
declare function __sveltets_2_union<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10>(t1:T1,t2?:T2,t3?:T3,t4?:T4,t5?:T5,t6?:T6,t7?:T7,t8?:T8,t9?:T9,t10?:T10): T1 & T2 & T3 & T4 & T5 & T6 & T7 & T8 & T9 & T10;
|
||||
declare function __sveltets_2_nonNullable<T>(type: T): NonNullable<T>;
|
||||
|
||||
declare function __sveltets_2_cssProp(prop: Record<string, any>): {};
|
||||
|
||||
// @ts-ignore Svelte v3/v4 don't have this
|
||||
declare function __sveltets_2_ensureSnippet(val: ReturnType<import('svelte').Snippet> | undefined | null): any;
|
||||
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type __sveltets_2_SvelteAnimationReturnType = {
|
||||
delay?: number,
|
||||
duration?: number,
|
||||
easing?: (t: number) => number,
|
||||
css?: (t: number, u: number) => string,
|
||||
tick?: (t: number, u: number) => void
|
||||
}
|
||||
declare var __sveltets_2_AnimationMove: { from: DOMRect, to: DOMRect }
|
||||
declare function __sveltets_2_ensureAnimation(animationCall: __sveltets_2_SvelteAnimationReturnType): {};
|
||||
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type __sveltets_2_SvelteActionReturnType = {
|
||||
update?: (args: any) => void,
|
||||
destroy?: () => void,
|
||||
$$_attributes?: Record<string, any>,
|
||||
} | void
|
||||
declare function __sveltets_2_ensureAction<T extends __sveltets_2_SvelteActionReturnType>(actionCall: T): T extends {$$_attributes?: any} ? T['$$_attributes'] : {};
|
||||
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type __sveltets_2_SvelteTransitionConfig = {
|
||||
delay?: number,
|
||||
duration?: number,
|
||||
easing?: (t: number) => number,
|
||||
css?: (t: number, u: number) => string,
|
||||
tick?: (t: number, u: number) => void
|
||||
}
|
||||
/** @internal PRIVATE API, DO NOT USE */
|
||||
type __sveltets_2_SvelteTransitionReturnType = __sveltets_2_SvelteTransitionConfig | (() => __sveltets_2_SvelteTransitionConfig)
|
||||
declare function __sveltets_2_ensureTransition(transitionCall: __sveltets_2_SvelteTransitionReturnType): {};
|
||||
|
||||
// Includes undefined and null for all types as all usages also allow these
|
||||
declare function __sveltets_2_ensureType<T>(type: AConstructorTypeOf<T>, el: T | undefined | null): {};
|
||||
declare function __sveltets_2_ensureType<T1, T2>(type1: AConstructorTypeOf<T1>, type2: AConstructorTypeOf<T2>, el: T1 | T2 | undefined | null): {};
|
||||
|
||||
// The following is necessary because there are two clashing errors that can't be solved at the same time
|
||||
// when using Svelte2TsxComponent, more precisely the event typings in
|
||||
// __sveltets_2_ensureComponent<T extends new (..) => _SvelteComponent<any,||any||<-this,any>>(type: T): T;
|
||||
// If we type it as "any", we have an error when using sth like {a: CustomEvent<any>}
|
||||
// If we type it as "{}", we have an error when using sth like {[evt: string]: CustomEvent<any>}
|
||||
// If we type it as "unknown", we get all kinds of follow up errors which we want to avoid
|
||||
// Therefore introduce two more base classes just for this case.
|
||||
/**
|
||||
* Ambient type only used for intellisense, DO NOT USE IN YOUR PROJECT
|
||||
*/
|
||||
declare type ATypedSvelteComponent = {
|
||||
/**
|
||||
* @internal This is for type checking capabilities only
|
||||
* and does not exist at runtime. Don't use this property.
|
||||
*/
|
||||
$$prop_def: any;
|
||||
/**
|
||||
* @internal This is for type checking capabilities only
|
||||
* and does not exist at runtime. Don't use this property.
|
||||
*/
|
||||
$$events_def: any;
|
||||
/**
|
||||
* @internal This is for type checking capabilities only
|
||||
* and does not exist at runtime. Don't use this property.
|
||||
*/
|
||||
$$slot_def: any;
|
||||
|
||||
$on(event: string, handler: any): () => void;
|
||||
}
|
||||
/**
|
||||
* Ambient type only used for intellisense, DO NOT USE IN YOUR PROJECT.
|
||||
*
|
||||
* If you're looking for the type of a Svelte Component, use `SvelteComponent` and `ComponentType` instead:
|
||||
*
|
||||
* ```ts
|
||||
* import type { ComponentType, SvelteComponent } from "svelte";
|
||||
* let myComponentConstructor: ComponentType<SvelteComponent> = ..;
|
||||
* ```
|
||||
*/
|
||||
declare type ConstructorOfATypedSvelteComponent = new (args: {target: any, props?: any}) => ATypedSvelteComponent
|
||||
// Usage note: Cannot properly transform generic function components to class components due to TypeScript limitations
|
||||
declare function __sveltets_2_ensureComponent<
|
||||
T extends
|
||||
| ConstructorOfATypedSvelteComponent
|
||||
| (typeof import('svelte') extends { mount: any }
|
||||
? // @ts-ignore svelte.Component doesn't exist in Svelte 4
|
||||
import('svelte').Component<any, any, any>
|
||||
: never)
|
||||
| null
|
||||
| undefined
|
||||
>(
|
||||
type: T
|
||||
): NonNullable<
|
||||
T extends ConstructorOfATypedSvelteComponent
|
||||
? T
|
||||
: typeof import('svelte') extends { mount: any }
|
||||
? // @ts-ignore svelte.Component doesn't exist in Svelte 4
|
||||
T extends import('svelte').Component<
|
||||
infer Props extends Record<string, any>,
|
||||
infer Exports extends Record<string, any>,
|
||||
infer Bindings extends string
|
||||
>
|
||||
? new (
|
||||
options: import('svelte').ComponentConstructorOptions<Props>
|
||||
) => import('svelte').SvelteComponent<Props, Props['$$events'], Props['$$slots']> &
|
||||
Exports & { $$bindings: Bindings }
|
||||
: never
|
||||
: never
|
||||
>;
|
||||
|
||||
declare function __sveltets_2_ensureArray<T extends ArrayLike<unknown> | Iterable<unknown>>(array: T): T extends ArrayLike<infer U> ? U[] : T extends Iterable<infer U> ? Iterable<U> : any[];
|
||||
|
||||
type __sveltets_2_PropsWithChildren<Props, Slots> = Props &
|
||||
(Slots extends { default: any }
|
||||
// This is unfortunate because it means "accepts no props" turns into "accepts any prop"
|
||||
// but the alternative is non-fixable type errors because of the way TypeScript index
|
||||
// signatures work (they will always take precedence and make an impossible-to-satisfy children type).
|
||||
? Props extends Record<string, never>
|
||||
? any
|
||||
: { children?: any }
|
||||
: {});
|
||||
declare function __sveltets_2_runes_constructor<Props extends {}>(render: {props: Props }): import("svelte").ComponentConstructorOptions<Props>;
|
||||
|
||||
declare function __sveltets_2_get_set_binding<T>(get: (() => T) | null | undefined, set: (t: T) => void): T;
|
||||
|
||||
declare function __sveltets_$$bindings<Bindings extends string[]>(...bindings: Bindings): Bindings[number];
|
||||
|
||||
declare function __sveltets_2_fn_component<
|
||||
Props extends Record<string, any>, Exports extends Record<string, any>, Bindings extends string
|
||||
// @ts-ignore Svelte 5 only
|
||||
>(klass: {props: Props, exports?: Exports, bindings?: Bindings }): import('svelte').Component<Props, Exports, Bindings>;
|
||||
|
||||
interface __sveltets_2_IsomorphicComponent<Props extends Record<string, any> = any, Events extends Record<string, any> = any, Slots extends Record<string, any> = any, Exports = {}, Bindings = string> {
|
||||
new (options: import('svelte').ComponentConstructorOptions<Props>): import('svelte').SvelteComponent<Props, Events, Slots> & { $$bindings?: Bindings } & Exports;
|
||||
(internal: unknown, props: Props extends Record<string, never> ? {$$events?: Events, $$slots?: Slots} : Props & {$$events?: Events, $$slots?: Slots}): Exports & { $set?: any, $on?: any };
|
||||
z_$$bindings?: Bindings;
|
||||
}
|
||||
|
||||
declare function __sveltets_2_isomorphic_component<
|
||||
Props extends Record<string, any>, Events extends Record<string, any>, Slots extends Record<string, any>, Exports extends Record<string, any>, Bindings extends string
|
||||
>(klass: {props: Props, events: Events, slots: Slots, exports?: Exports, bindings?: Bindings }): __sveltets_2_IsomorphicComponent<Props, Events, Slots, Exports, Bindings>;
|
||||
|
||||
declare function __sveltets_2_isomorphic_component_slots<
|
||||
Props extends Record<string, any>, Events extends Record<string, any>, Slots extends Record<string, any>, Exports extends Record<string, any>, Bindings extends string
|
||||
>(klass: {props: Props, events: Events, slots: Slots, exports?: Exports, bindings?: Bindings }): __sveltets_2_IsomorphicComponent<__sveltets_2_PropsWithChildren<Props, Slots>, Events, Slots, Exports, Bindings>;
|
128 node_modules/@alloc/quick-lru/index.d.ts generated vendored Normal file
@@ -0,0 +1,128 @@
|
||||
declare namespace QuickLRU {
|
||||
interface Options<KeyType, ValueType> {
|
||||
/**
|
||||
The maximum number of milliseconds an item should remain in the cache.
|
||||
|
||||
@default Infinity
|
||||
|
||||
By default, `maxAge` will be `Infinity`, which means that items will never expire.
|
||||
Lazy expiration upon the next write or read call.
|
||||
|
||||
Individual expiration of an item can be specified by the `set(key, value, maxAge)` method.
|
||||
*/
|
||||
readonly maxAge?: number;
|
||||
|
||||
/**
|
||||
The maximum number of items before evicting the least recently used items.
|
||||
*/
|
||||
readonly maxSize: number;
|
||||
|
||||
/**
|
||||
Called right before an item is evicted from the cache.
|
||||
|
||||
Useful for side effects or for items like object URLs that need explicit cleanup (`revokeObjectURL`).
|
||||
*/
|
||||
onEviction?: (key: KeyType, value: ValueType) => void;
|
||||
}
|
||||
}
|
||||
|
||||
declare class QuickLRU<KeyType, ValueType>
|
||||
implements Iterable<[KeyType, ValueType]> {
|
||||
/**
|
||||
The stored item count.
|
||||
*/
|
||||
readonly size: number;
|
||||
|
||||
/**
|
||||
Simple ["Least Recently Used" (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29).
|
||||
|
||||
The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop.
|
||||
|
||||
@example
|
||||
```
|
||||
import QuickLRU = require('quick-lru');
|
||||
|
||||
const lru = new QuickLRU({maxSize: 1000});
|
||||
|
||||
lru.set('🦄', '🌈');
|
||||
|
||||
lru.has('🦄');
|
||||
//=> true
|
||||
|
||||
lru.get('🦄');
|
||||
//=> '🌈'
|
||||
```
|
||||
*/
|
||||
constructor(options: QuickLRU.Options<KeyType, ValueType>);
|
||||
|
||||
[Symbol.iterator](): IterableIterator<[KeyType, ValueType]>;
|
||||
|
||||
/**
|
||||
Set an item. Returns the instance.
|
||||
|
||||
Individual expiration of an item can be specified with the `maxAge` option. If not specified, the global `maxAge` value will be used in case it is specified in the constructor, otherwise the item will never expire.
|
||||
|
||||
@returns The list instance.
|
||||
*/
|
||||
set(key: KeyType, value: ValueType, options?: {maxAge?: number}): this;
|
||||
|
||||
/**
|
||||
Get an item.
|
||||
|
||||
@returns The stored item or `undefined`.
|
||||
*/
|
||||
get(key: KeyType): ValueType | undefined;
|
||||
|
||||
/**
|
||||
Check if an item exists.
|
||||
*/
|
||||
has(key: KeyType): boolean;
|
||||
|
||||
/**
|
||||
Get an item without marking it as recently used.
|
||||
|
||||
@returns The stored item or `undefined`.
|
||||
*/
|
||||
peek(key: KeyType): ValueType | undefined;
|
||||
|
||||
/**
|
||||
Delete an item.
|
||||
|
||||
@returns `true` if the item is removed or `false` if the item doesn't exist.
|
||||
*/
|
||||
delete(key: KeyType): boolean;
|
||||
|
||||
/**
|
||||
Delete all items.
|
||||
*/
|
||||
clear(): void;
|
||||
|
||||
/**
|
||||
Update the `maxSize` in-place, discarding items as necessary. Insertion order is mostly preserved, though this is not a strong guarantee.
|
||||
|
||||
Useful for on-the-fly tuning of cache sizes in live systems.
|
||||
*/
|
||||
resize(maxSize: number): void;
|
||||
|
||||
/**
|
||||
Iterable for all the keys.
|
||||
*/
|
||||
keys(): IterableIterator<KeyType>;
|
||||
|
||||
/**
|
||||
Iterable for all the values.
|
||||
*/
|
||||
values(): IterableIterator<ValueType>;
|
||||
|
||||
/**
|
||||
Iterable for all entries, starting with the oldest (ascending in recency).
|
||||
*/
|
||||
entriesAscending(): IterableIterator<[KeyType, ValueType]>;
|
||||
|
||||
/**
|
||||
Iterable for all entries, starting with the newest (descending in recency).
|
||||
*/
|
||||
entriesDescending(): IterableIterator<[KeyType, ValueType]>;
|
||||
}
|
||||
|
||||
export = QuickLRU;
|
263 node_modules/@alloc/quick-lru/index.js generated vendored Normal file
@@ -0,0 +1,263 @@
|
||||
'use strict';
|
||||
|
||||
class QuickLRU {
|
||||
constructor(options = {}) {
|
||||
if (!(options.maxSize && options.maxSize > 0)) {
|
||||
throw new TypeError('`maxSize` must be a number greater than 0');
|
||||
}
|
||||
|
||||
if (typeof options.maxAge === 'number' && options.maxAge === 0) {
|
||||
throw new TypeError('`maxAge` must be a number greater than 0');
|
||||
}
|
||||
|
||||
this.maxSize = options.maxSize;
|
||||
this.maxAge = options.maxAge || Infinity;
|
||||
this.onEviction = options.onEviction;
|
||||
this.cache = new Map();
|
||||
this.oldCache = new Map();
|
||||
this._size = 0;
|
||||
}
|
||||
|
||||
_emitEvictions(cache) {
|
||||
if (typeof this.onEviction !== 'function') {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const [key, item] of cache) {
|
||||
this.onEviction(key, item.value);
|
||||
}
|
||||
}
|
||||
|
||||
_deleteIfExpired(key, item) {
|
||||
if (typeof item.expiry === 'number' && item.expiry <= Date.now()) {
|
||||
if (typeof this.onEviction === 'function') {
|
||||
this.onEviction(key, item.value);
|
||||
}
|
||||
|
||||
return this.delete(key);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
_getOrDeleteIfExpired(key, item) {
|
||||
const deleted = this._deleteIfExpired(key, item);
|
||||
if (deleted === false) {
|
||||
return item.value;
|
||||
}
|
||||
}
|
||||
|
||||
_getItemValue(key, item) {
|
||||
return item.expiry ? this._getOrDeleteIfExpired(key, item) : item.value;
|
||||
}
|
||||
|
||||
_peek(key, cache) {
|
||||
const item = cache.get(key);
|
||||
|
||||
return this._getItemValue(key, item);
|
||||
}
|
||||
|
||||
_set(key, value) {
|
||||
this.cache.set(key, value);
|
||||
this._size++;
|
||||
|
||||
if (this._size >= this.maxSize) {
|
||||
this._size = 0;
|
||||
this._emitEvictions(this.oldCache);
|
||||
this.oldCache = this.cache;
|
||||
this.cache = new Map();
|
||||
}
|
||||
}
|
||||
|
||||
_moveToRecent(key, item) {
|
||||
this.oldCache.delete(key);
|
||||
this._set(key, item);
|
||||
}
|
||||
|
||||
* _entriesAscending() {
|
||||
for (const item of this.oldCache) {
|
||||
const [key, value] = item;
|
||||
if (!this.cache.has(key)) {
|
||||
const deleted = this._deleteIfExpired(key, value);
|
||||
if (deleted === false) {
|
||||
yield item;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const item of this.cache) {
|
||||
const [key, value] = item;
|
||||
const deleted = this._deleteIfExpired(key, value);
|
||||
if (deleted === false) {
|
||||
yield item;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
get(key) {
|
||||
if (this.cache.has(key)) {
|
||||
const item = this.cache.get(key);
|
||||
|
||||
return this._getItemValue(key, item);
|
||||
}
|
||||
|
||||
if (this.oldCache.has(key)) {
|
||||
const item = this.oldCache.get(key);
|
||||
if (this._deleteIfExpired(key, item) === false) {
|
||||
this._moveToRecent(key, item);
|
||||
return item.value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
set(key, value, {maxAge = this.maxAge === Infinity ? undefined : Date.now() + this.maxAge} = {}) {
|
||||
if (this.cache.has(key)) {
|
||||
this.cache.set(key, {
|
||||
value,
|
||||
maxAge
|
||||
});
|
||||
} else {
|
||||
this._set(key, {value, expiry: maxAge});
|
||||
}
|
||||
}
|
||||
|
||||
has(key) {
|
||||
if (this.cache.has(key)) {
|
||||
return !this._deleteIfExpired(key, this.cache.get(key));
|
||||
}
|
||||
|
||||
if (this.oldCache.has(key)) {
|
||||
return !this._deleteIfExpired(key, this.oldCache.get(key));
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
peek(key) {
|
||||
if (this.cache.has(key)) {
|
||||
return this._peek(key, this.cache);
|
||||
}
|
||||
|
||||
if (this.oldCache.has(key)) {
|
||||
return this._peek(key, this.oldCache);
|
||||
}
|
||||
}
|
||||
|
||||
delete(key) {
|
||||
const deleted = this.cache.delete(key);
|
||||
if (deleted) {
|
||||
this._size--;
|
||||
}
|
||||
|
||||
return this.oldCache.delete(key) || deleted;
|
||||
}
|
||||
|
||||
clear() {
|
||||
this.cache.clear();
|
||||
this.oldCache.clear();
|
||||
this._size = 0;
|
||||
}
|
||||
|
||||
resize(newSize) {
|
||||
if (!(newSize && newSize > 0)) {
|
||||
throw new TypeError('`maxSize` must be a number greater than 0');
|
||||
}
|
||||
|
||||
const items = [...this._entriesAscending()];
|
||||
const removeCount = items.length - newSize;
|
||||
if (removeCount < 0) {
|
||||
this.cache = new Map(items);
|
||||
this.oldCache = new Map();
|
||||
this._size = items.length;
|
||||
} else {
|
||||
if (removeCount > 0) {
|
||||
this._emitEvictions(items.slice(0, removeCount));
|
||||
}
|
||||
|
||||
this.oldCache = new Map(items.slice(removeCount));
|
||||
this.cache = new Map();
|
||||
this._size = 0;
|
||||
}
|
||||
|
||||
this.maxSize = newSize;
|
||||
}
|
||||
|
||||
* keys() {
|
||||
for (const [key] of this) {
|
||||
yield key;
|
||||
}
|
||||
}
|
||||
|
||||
* values() {
|
||||
for (const [, value] of this) {
|
||||
yield value;
|
||||
}
|
||||
}
|
||||
|
||||
* [Symbol.iterator]() {
|
||||
for (const item of this.cache) {
|
||||
const [key, value] = item;
|
||||
const deleted = this._deleteIfExpired(key, value);
|
||||
if (deleted === false) {
|
||||
yield [key, value.value];
|
||||
}
|
||||
}
|
||||
|
||||
for (const item of this.oldCache) {
|
||||
const [key, value] = item;
|
||||
if (!this.cache.has(key)) {
|
||||
const deleted = this._deleteIfExpired(key, value);
|
||||
if (deleted === false) {
|
||||
yield [key, value.value];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
* entriesDescending() {
|
||||
let items = [...this.cache];
|
||||
for (let i = items.length - 1; i >= 0; --i) {
|
||||
const item = items[i];
|
||||
const [key, value] = item;
|
||||
const deleted = this._deleteIfExpired(key, value);
|
||||
if (deleted === false) {
|
||||
yield [key, value.value];
|
||||
}
|
||||
}
|
||||
|
||||
items = [...this.oldCache];
|
||||
for (let i = items.length - 1; i >= 0; --i) {
|
||||
const item = items[i];
|
||||
const [key, value] = item;
|
||||
if (!this.cache.has(key)) {
|
||||
const deleted = this._deleteIfExpired(key, value);
|
||||
if (deleted === false) {
|
||||
yield [key, value.value];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
* entriesAscending() {
|
||||
for (const [key, value] of this._entriesAscending()) {
|
||||
yield [key, value.value];
|
||||
}
|
||||
}
|
||||
|
||||
get size() {
|
||||
if (!this._size) {
|
||||
return this.oldCache.size;
|
||||
}
|
||||
|
||||
let oldCacheSize = 0;
|
||||
for (const key of this.oldCache.keys()) {
|
||||
if (!this.cache.has(key)) {
|
||||
oldCacheSize++;
|
||||
}
|
||||
}
|
||||
|
||||
return Math.min(this._size + oldCacheSize, this.maxSize);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = QuickLRU;
|
9 node_modules/@alloc/quick-lru/license generated vendored Normal file
@@ -0,0 +1,9 @@
MIT License

Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
43 node_modules/@alloc/quick-lru/package.json generated vendored Normal file
@@ -0,0 +1,43 @@
{
	"name": "@alloc/quick-lru",
	"version": "5.2.0",
	"description": "Simple “Least Recently Used” (LRU) cache",
	"license": "MIT",
	"repository": "sindresorhus/quick-lru",
	"funding": "https://github.com/sponsors/sindresorhus",
	"author": {
		"name": "Sindre Sorhus",
		"email": "sindresorhus@gmail.com",
		"url": "https://sindresorhus.com"
	},
	"engines": {
		"node": ">=10"
	},
	"scripts": {
		"test": "xo && nyc ava && tsd"
	},
	"files": [
		"index.js",
		"index.d.ts"
	],
	"keywords": [
		"lru",
		"quick",
		"cache",
		"caching",
		"least",
		"recently",
		"used",
		"fast",
		"map",
		"hash",
		"buffer"
	],
	"devDependencies": {
		"ava": "^2.0.0",
		"coveralls": "^3.0.3",
		"nyc": "^15.0.0",
		"tsd": "^0.11.0",
		"xo": "^0.26.0"
	}
}
139 node_modules/@alloc/quick-lru/readme.md generated vendored Normal file
@@ -0,0 +1,139 @@
# quick-lru [](https://travis-ci.org/sindresorhus/quick-lru) [](https://coveralls.io/github/sindresorhus/quick-lru?branch=master)

> Simple [“Least Recently Used” (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29)

Useful when you need to cache something and limit memory usage.

Inspired by the [`hashlru` algorithm](https://github.com/dominictarr/hashlru#algorithm), but instead uses [`Map`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Map) to support keys of any type, not just strings, and values can be `undefined`.

## Install

```
$ npm install quick-lru
```

## Usage

```js
const QuickLRU = require('quick-lru');

const lru = new QuickLRU({maxSize: 1000});

lru.set('🦄', '🌈');

lru.has('🦄');
//=> true

lru.get('🦄');
//=> '🌈'
```

## API

### new QuickLRU(options?)

Returns a new instance.

### options

Type: `object`

#### maxSize

*Required*\
Type: `number`

The maximum number of items before evicting the least recently used items.

#### maxAge

Type: `number`\
Default: `Infinity`

The maximum number of milliseconds an item should remain in cache.
By default maxAge will be Infinity, which means that items will never expire.

Lazy expiration happens upon the next `write` or `read` call.

Individual expiration of an item can be specified by the `set(key, value, options)` method.

#### onEviction

*Optional*\
Type: `(key, value) => void`

Called right before an item is evicted from the cache.

Useful for side effects or for items like object URLs that need explicit cleanup (`revokeObjectURL`).

### Instance

The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop.

Both `key` and `value` can be of any type.

#### .set(key, value, options?)

Set an item. Returns the instance.

Individual expiration of an item can be specified with the `maxAge` option. If not specified, the global `maxAge` value will be used in case it is specified on the constructor, otherwise the item will never expire.

#### .get(key)

Get an item.

#### .has(key)

Check if an item exists.

#### .peek(key)

Get an item without marking it as recently used.

#### .delete(key)

Delete an item.

Returns `true` if the item is removed or `false` if the item doesn't exist.

#### .clear()

Delete all items.

#### .resize(maxSize)

Update the `maxSize`, discarding items as necessary. Insertion order is mostly preserved, though this is not a strong guarantee.

Useful for on-the-fly tuning of cache sizes in live systems.

#### .keys()

Iterable for all the keys.

#### .values()

Iterable for all the values.

#### .entriesAscending()

Iterable for all entries, starting with the oldest (ascending in recency).

#### .entriesDescending()

Iterable for all entries, starting with the newest (descending in recency).

#### .size

The stored item count.

---

<div align="center">
	<b>
		<a href="https://tidelift.com/subscription/pkg/npm-quick-lru?utm_source=npm-quick-lru&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
	</b>
	<br>
	<sub>
		Tidelift helps make open source sustainable for maintainers while giving companies<br>assurances about security, maintenance, and licensing for their dependencies.
	</sub>
</div>
14 node_modules/@isaacs/cliui/LICENSE.txt generated vendored Normal file
@@ -0,0 +1,14 @@
Copyright (c) 2015, Contributors

Permission to use, copy, modify, and/or distribute this software
for any purpose with or without fee is hereby granted, provided
that the above copyright notice and this permission notice
appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE
LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES
OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
143 node_modules/@isaacs/cliui/README.md generated vendored Normal file
@@ -0,0 +1,143 @@
|
||||
# @isaacs/cliui
|
||||
|
||||
Temporary fork of [cliui](http://npm.im/cliui).
|
||||
|
||||

|
||||
[](https://www.npmjs.com/package/cliui)
|
||||
[](https://conventionalcommits.org)
|
||||

|
||||
|
||||
easily create complex multi-column command-line-interfaces.
|
||||
|
||||
## Example
|
||||
|
||||
```js
|
||||
const ui = require('cliui')()
|
||||
|
||||
ui.div('Usage: $0 [command] [options]')
|
||||
|
||||
ui.div({
|
||||
text: 'Options:',
|
||||
padding: [2, 0, 1, 0]
|
||||
})
|
||||
|
||||
ui.div(
|
||||
{
|
||||
text: "-f, --file",
|
||||
width: 20,
|
||||
padding: [0, 4, 0, 4]
|
||||
},
|
||||
{
|
||||
text: "the file to load." +
|
||||
chalk.green("(if this description is long it wraps).")
|
||||
,
|
||||
width: 20
|
||||
},
|
||||
{
|
||||
text: chalk.red("[required]"),
|
||||
align: 'right'
|
||||
}
|
||||
)
|
||||
|
||||
console.log(ui.toString())
|
||||
```
|
||||
|
||||
## Deno/ESM Support
|
||||
|
||||
As of `v7` `cliui` supports [Deno](https://github.com/denoland/deno) and
|
||||
[ESM](https://nodejs.org/api/esm.html#esm_ecmascript_modules):
|
||||
|
||||
```typescript
|
||||
import cliui from "https://deno.land/x/cliui/deno.ts";
|
||||
|
||||
const ui = cliui({})
|
||||
|
||||
ui.div('Usage: $0 [command] [options]')
|
||||
|
||||
ui.div({
|
||||
text: 'Options:',
|
||||
padding: [2, 0, 1, 0]
|
||||
})
|
||||
|
||||
ui.div({
|
||||
text: "-f, --file",
|
||||
width: 20,
|
||||
padding: [0, 4, 0, 4]
|
||||
})
|
||||
|
||||
console.log(ui.toString())
|
||||
```
|
||||
|
||||
<img width="500" src="screenshot.png">
|
||||
|
||||
## Layout DSL
|
||||
|
||||
cliui exposes a simple layout DSL:
|
||||
|
||||
If you create a single `ui.div`, passing a string rather than an
|
||||
object:
|
||||
|
||||
* `\n`: characters will be interpreted as new rows.
|
||||
* `\t`: characters will be interpreted as new columns.
|
||||
* `\s`: characters will be interpreted as padding.
|
||||
|
||||
**as an example...**
|
||||
|
||||
```js
|
||||
var ui = require('./')({
|
||||
width: 60
|
||||
})
|
||||
|
||||
ui.div(
|
||||
'Usage: node ./bin/foo.js\n' +
|
||||
' <regex>\t provide a regex\n' +
|
||||
' <glob>\t provide a glob\t [required]'
|
||||
)
|
||||
|
||||
console.log(ui.toString())
|
||||
```
|
||||
|
||||
**will output:**
|
||||
|
||||
```shell
|
||||
Usage: node ./bin/foo.js
|
||||
<regex> provide a regex
|
||||
<glob> provide a glob [required]
|
||||
```
|
||||
|
||||
## Methods
|
||||
|
||||
```js
|
||||
cliui = require('cliui')
|
||||
```
|
||||
|
||||
### cliui({width: integer})
|
||||
|
||||
Specify the maximum width of the UI being generated.
|
||||
If no width is provided, cliui will try to get the current window's width and use it, and if that doesn't work, width will be set to `80`.
|
||||
|
||||
### cliui({wrap: boolean})
|
||||
|
||||
Enable or disable the wrapping of text in a column.
|
||||
|
||||
### cliui.div(column, column, column)
|
||||
|
||||
Create a row with any number of columns, a column
|
||||
can either be a string, or an object with the following
|
||||
options:
|
||||
|
||||
* **text:** some text to place in the column.
|
||||
* **width:** the width of a column.
|
||||
* **align:** alignment, `right` or `center`.
|
||||
* **padding:** `[top, right, bottom, left]`.
|
||||
* **border:** should a border be placed around the div?
|
||||
|
||||
### cliui.span(column, column, column)
|
||||
|
||||
Similar to `div`, except the next row will be appended without
|
||||
a new line being created.
|
||||
|
||||
### cliui.resetOutput()
|
||||
|
||||
Resets the UI elements of the current cliui instance, maintaining the values
|
||||
set for `width` and `wrap`.
|
317 node_modules/@isaacs/cliui/build/index.cjs generated vendored Normal file
@@ -0,0 +1,317 @@
|
||||
'use strict';
|
||||
|
||||
const align = {
|
||||
right: alignRight,
|
||||
center: alignCenter
|
||||
};
|
||||
const top = 0;
|
||||
const right = 1;
|
||||
const bottom = 2;
|
||||
const left = 3;
|
||||
class UI {
|
||||
constructor(opts) {
|
||||
var _a;
|
||||
this.width = opts.width;
|
||||
/* c8 ignore start */
|
||||
this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ? _a : true;
|
||||
/* c8 ignore stop */
|
||||
this.rows = [];
|
||||
}
|
||||
span(...args) {
|
||||
const cols = this.div(...args);
|
||||
cols.span = true;
|
||||
}
|
||||
resetOutput() {
|
||||
this.rows = [];
|
||||
}
|
||||
div(...args) {
|
||||
if (args.length === 0) {
|
||||
this.div('');
|
||||
}
|
||||
if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') {
|
||||
return this.applyLayoutDSL(args[0]);
|
||||
}
|
||||
const cols = args.map(arg => {
|
||||
if (typeof arg === 'string') {
|
||||
return this.colFromString(arg);
|
||||
}
|
||||
return arg;
|
||||
});
|
||||
this.rows.push(cols);
|
||||
return cols;
|
||||
}
|
||||
shouldApplyLayoutDSL(...args) {
|
||||
return args.length === 1 && typeof args[0] === 'string' &&
|
||||
/[\t\n]/.test(args[0]);
|
||||
}
|
||||
applyLayoutDSL(str) {
|
||||
const rows = str.split('\n').map(row => row.split('\t'));
|
||||
let leftColumnWidth = 0;
|
||||
// simple heuristic for layout, make sure the
|
||||
// second column lines up along the left-hand.
|
||||
// don't allow the first column to take up more
|
||||
// than 50% of the screen.
|
||||
rows.forEach(columns => {
|
||||
if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) {
|
||||
leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0]));
|
||||
}
|
||||
});
|
||||
// generate a table:
|
||||
// replacing ' ' with padding calculations.
|
||||
// using the algorithmically generated width.
|
||||
rows.forEach(columns => {
|
||||
this.div(...columns.map((r, i) => {
|
||||
return {
|
||||
text: r.trim(),
|
||||
padding: this.measurePadding(r),
|
||||
width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined
|
||||
};
|
||||
}));
|
||||
});
|
||||
return this.rows[this.rows.length - 1];
|
||||
}
|
||||
colFromString(text) {
|
||||
return {
|
||||
text,
|
||||
padding: this.measurePadding(text)
|
||||
};
|
||||
}
|
||||
measurePadding(str) {
|
||||
// measure padding without ansi escape codes
|
||||
const noAnsi = mixin.stripAnsi(str);
|
||||
return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length];
|
||||
}
|
||||
toString() {
|
||||
const lines = [];
|
||||
this.rows.forEach(row => {
|
||||
this.rowToString(row, lines);
|
||||
});
|
||||
// don't display any lines with the
|
||||
// hidden flag set.
|
||||
return lines
|
||||
.filter(line => !line.hidden)
|
||||
.map(line => line.text)
|
||||
.join('\n');
|
||||
}
|
||||
rowToString(row, lines) {
|
||||
this.rasterize(row).forEach((rrow, r) => {
|
||||
let str = '';
|
||||
rrow.forEach((col, c) => {
|
||||
const { width } = row[c]; // the width with padding.
|
||||
const wrapWidth = this.negatePadding(row[c]); // the width without padding.
|
||||
let ts = col; // temporary string used during alignment/padding.
|
||||
if (wrapWidth > mixin.stringWidth(col)) {
|
||||
ts += ' '.repeat(wrapWidth - mixin.stringWidth(col));
|
||||
}
|
||||
// align the string within its column.
|
||||
if (row[c].align && row[c].align !== 'left' && this.wrap) {
|
||||
const fn = align[row[c].align];
|
||||
ts = fn(ts, wrapWidth);
|
||||
if (mixin.stringWidth(ts) < wrapWidth) {
|
||||
/* c8 ignore start */
|
||||
const w = width || 0;
|
||||
/* c8 ignore stop */
|
||||
ts += ' '.repeat(w - mixin.stringWidth(ts) - 1);
|
||||
}
|
||||
}
|
||||
// apply border and padding to string.
|
||||
const padding = row[c].padding || [0, 0, 0, 0];
|
||||
if (padding[left]) {
|
||||
str += ' '.repeat(padding[left]);
|
||||
}
|
||||
str += addBorder(row[c], ts, '| ');
|
||||
str += ts;
|
||||
str += addBorder(row[c], ts, ' |');
|
||||
if (padding[right]) {
|
||||
str += ' '.repeat(padding[right]);
|
||||
}
|
||||
// if prior row is span, try to render the
|
||||
// current row on the prior line.
|
||||
if (r === 0 && lines.length > 0) {
|
||||
str = this.renderInline(str, lines[lines.length - 1]);
|
||||
}
|
||||
});
|
||||
// remove trailing whitespace.
|
||||
lines.push({
|
||||
text: str.replace(/ +$/, ''),
|
||||
span: row.span
|
||||
});
|
||||
});
|
||||
return lines;
|
||||
}
|
||||
// if the full 'source' can render in
|
||||
// the target line, do so.
|
||||
renderInline(source, previousLine) {
|
||||
const match = source.match(/^ */);
|
||||
/* c8 ignore start */
|
||||
const leadingWhitespace = match ? match[0].length : 0;
|
||||
/* c8 ignore stop */
|
||||
const target = previousLine.text;
|
||||
const targetTextWidth = mixin.stringWidth(target.trimEnd());
|
||||
if (!previousLine.span) {
|
||||
return source;
|
||||
}
|
||||
// if we're not applying wrapping logic,
|
||||
// just always append to the span.
|
||||
if (!this.wrap) {
|
||||
previousLine.hidden = true;
|
||||
return target + source;
|
||||
}
|
||||
if (leadingWhitespace < targetTextWidth) {
|
||||
return source;
|
||||
}
|
||||
previousLine.hidden = true;
|
||||
return target.trimEnd() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimStart();
|
||||
}
|
||||
rasterize(row) {
|
||||
const rrows = [];
|
||||
const widths = this.columnWidths(row);
|
||||
let wrapped;
|
||||
// word wrap all columns, and create
|
||||
// a data-structure that is easy to rasterize.
|
||||
row.forEach((col, c) => {
|
||||
// leave room for left and right padding.
|
||||
col.width = widths[c];
|
||||
if (this.wrap) {
|
||||
wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n');
|
||||
}
|
||||
else {
|
||||
wrapped = col.text.split('\n');
|
||||
}
|
||||
if (col.border) {
|
||||
wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.');
|
||||
wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'");
|
||||
}
|
||||
// add top and bottom padding.
|
||||
if (col.padding) {
|
||||
wrapped.unshift(...new Array(col.padding[top] || 0).fill(''));
|
||||
wrapped.push(...new Array(col.padding[bottom] || 0).fill(''));
|
||||
}
|
||||
wrapped.forEach((str, r) => {
|
||||
if (!rrows[r]) {
|
||||
rrows.push([]);
|
||||
}
|
||||
const rrow = rrows[r];
|
||||
for (let i = 0; i < c; i++) {
|
||||
if (rrow[i] === undefined) {
|
||||
rrow.push('');
|
||||
}
|
||||
}
|
||||
rrow.push(str);
|
||||
});
|
||||
});
|
||||
return rrows;
|
||||
}
|
||||
negatePadding(col) {
|
||||
/* c8 ignore start */
|
||||
let wrapWidth = col.width || 0;
|
||||
/* c8 ignore stop */
|
||||
if (col.padding) {
|
||||
wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0);
|
||||
}
|
||||
if (col.border) {
|
||||
wrapWidth -= 4;
|
||||
}
|
||||
return wrapWidth;
|
||||
}
|
||||
columnWidths(row) {
|
||||
if (!this.wrap) {
|
||||
return row.map(col => {
|
||||
return col.width || mixin.stringWidth(col.text);
|
||||
});
|
||||
}
|
||||
let unset = row.length;
|
||||
let remainingWidth = this.width;
|
||||
// column widths can be set in config.
|
||||
const widths = row.map(col => {
|
||||
if (col.width) {
|
||||
unset--;
|
||||
remainingWidth -= col.width;
|
||||
return col.width;
|
||||
}
|
||||
return undefined;
|
||||
});
|
||||
// any unset widths should be calculated.
|
||||
/* c8 ignore start */
|
||||
const unsetWidth = unset ? Math.floor(remainingWidth / unset) : 0;
|
||||
/* c8 ignore stop */
|
||||
return widths.map((w, i) => {
|
||||
if (w === undefined) {
|
||||
return Math.max(unsetWidth, _minWidth(row[i]));
|
||||
}
|
||||
return w;
|
||||
});
|
||||
}
|
||||
}
|
||||
function addBorder(col, ts, style) {
|
||||
if (col.border) {
|
||||
if (/[.']-+[.']/.test(ts)) {
|
||||
return '';
|
||||
}
|
||||
if (ts.trim().length !== 0) {
|
||||
return style;
|
||||
}
|
||||
return ' ';
|
||||
}
|
||||
return '';
|
||||
}
|
||||
// calculates the minimum width of
|
||||
// a column, based on padding preferences.
|
||||
function _minWidth(col) {
|
||||
const padding = col.padding || [];
|
||||
const minWidth = 1 + (padding[left] || 0) + (padding[right] || 0);
|
||||
if (col.border) {
|
||||
return minWidth + 4;
|
||||
}
|
||||
return minWidth;
|
||||
}
|
||||
function getWindowWidth() {
|
||||
/* c8 ignore start */
|
||||
if (typeof process === 'object' && process.stdout && process.stdout.columns) {
|
||||
return process.stdout.columns;
|
||||
}
|
||||
return 80;
|
||||
}
|
||||
/* c8 ignore stop */
|
||||
function alignRight(str, width) {
|
||||
str = str.trim();
|
||||
const strWidth = mixin.stringWidth(str);
|
||||
if (strWidth < width) {
|
||||
return ' '.repeat(width - strWidth) + str;
|
||||
}
|
||||
return str;
|
||||
}
|
||||
function alignCenter(str, width) {
|
||||
str = str.trim();
|
||||
const strWidth = mixin.stringWidth(str);
|
||||
/* c8 ignore start */
|
||||
if (strWidth >= width) {
|
||||
return str;
|
||||
}
|
||||
/* c8 ignore stop */
|
||||
return ' '.repeat((width - strWidth) >> 1) + str;
|
||||
}
|
||||
let mixin;
|
||||
function cliui(opts, _mixin) {
|
||||
mixin = _mixin;
|
||||
return new UI({
|
||||
/* c8 ignore start */
|
||||
width: (opts === null || opts === void 0 ? void 0 : opts.width) || getWindowWidth(),
|
||||
wrap: opts === null || opts === void 0 ? void 0 : opts.wrap
|
||||
/* c8 ignore stop */
|
||||
});
|
||||
}
|
||||
|
||||
// Bootstrap cliui with CommonJS dependencies:
|
||||
const stringWidth = require('string-width-cjs');
|
||||
const stripAnsi = require('strip-ansi-cjs');
|
||||
const wrap = require('wrap-ansi-cjs');
|
||||
function ui(opts) {
|
||||
return cliui(opts, {
|
||||
stringWidth,
|
||||
stripAnsi,
|
||||
wrap
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = ui;
|
43 node_modules/@isaacs/cliui/build/index.d.cts generated vendored Normal file
@@ -0,0 +1,43 @@
interface UIOptions {
    width: number;
    wrap?: boolean;
    rows?: string[];
}
interface Column {
    text: string;
    width?: number;
    align?: "right" | "left" | "center";
    padding: number[];
    border?: boolean;
}
interface ColumnArray extends Array<Column> {
    span: boolean;
}
interface Line {
    hidden?: boolean;
    text: string;
    span?: boolean;
}
declare class UI {
    width: number;
    wrap: boolean;
    rows: ColumnArray[];
    constructor(opts: UIOptions);
    span(...args: ColumnArray): void;
    resetOutput(): void;
    div(...args: (Column | string)[]): ColumnArray;
    private shouldApplyLayoutDSL;
    private applyLayoutDSL;
    private colFromString;
    private measurePadding;
    toString(): string;
    rowToString(row: ColumnArray, lines: Line[]): Line[];
    // if the full 'source' can render in
    // the target line, do so.
    private renderInline;
    private rasterize;
    private negatePadding;
    private columnWidths;
}
declare function ui(opts: UIOptions): UI;
export { ui as default };
302 node_modules/@isaacs/cliui/build/lib/index.js generated vendored Normal file
@@ -0,0 +1,302 @@
|
||||
'use strict';
|
||||
const align = {
|
||||
right: alignRight,
|
||||
center: alignCenter
|
||||
};
|
||||
const top = 0;
|
||||
const right = 1;
|
||||
const bottom = 2;
|
||||
const left = 3;
|
||||
export class UI {
|
||||
constructor(opts) {
|
||||
var _a;
|
||||
this.width = opts.width;
|
||||
/* c8 ignore start */
|
||||
this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ? _a : true;
|
||||
/* c8 ignore stop */
|
||||
this.rows = [];
|
||||
}
|
||||
span(...args) {
|
||||
const cols = this.div(...args);
|
||||
cols.span = true;
|
||||
}
|
||||
resetOutput() {
|
||||
this.rows = [];
|
||||
}
|
||||
div(...args) {
|
||||
if (args.length === 0) {
|
||||
this.div('');
|
||||
}
|
||||
if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') {
|
||||
return this.applyLayoutDSL(args[0]);
|
||||
}
|
||||
const cols = args.map(arg => {
|
||||
if (typeof arg === 'string') {
|
||||
return this.colFromString(arg);
|
||||
}
|
||||
return arg;
|
||||
});
|
||||
this.rows.push(cols);
|
||||
return cols;
|
||||
}
|
||||
shouldApplyLayoutDSL(...args) {
|
||||
return args.length === 1 && typeof args[0] === 'string' &&
|
||||
/[\t\n]/.test(args[0]);
|
||||
}
|
||||
applyLayoutDSL(str) {
|
||||
const rows = str.split('\n').map(row => row.split('\t'));
|
||||
let leftColumnWidth = 0;
|
||||
// simple heuristic for layout, make sure the
|
||||
// second column lines up along the left-hand.
|
||||
// don't allow the first column to take up more
|
||||
// than 50% of the screen.
|
||||
rows.forEach(columns => {
|
||||
if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) {
|
||||
leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0]));
|
||||
}
|
||||
});
|
||||
// generate a table:
|
||||
// replacing ' ' with padding calculations.
|
||||
// using the algorithmically generated width.
|
||||
rows.forEach(columns => {
|
||||
this.div(...columns.map((r, i) => {
|
||||
return {
|
||||
text: r.trim(),
|
||||
padding: this.measurePadding(r),
|
||||
width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined
|
||||
};
|
||||
}));
|
||||
});
|
||||
return this.rows[this.rows.length - 1];
|
||||
}
|
||||
colFromString(text) {
|
||||
return {
|
||||
text,
|
||||
padding: this.measurePadding(text)
|
||||
};
|
||||
}
|
||||
measurePadding(str) {
|
||||
// measure padding without ansi escape codes
|
||||
const noAnsi = mixin.stripAnsi(str);
|
||||
return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length];
|
||||
}
|
||||
toString() {
|
||||
const lines = [];
|
||||
this.rows.forEach(row => {
|
||||
this.rowToString(row, lines);
|
||||
});
|
||||
// don't display any lines with the
|
||||
// hidden flag set.
|
||||
return lines
|
||||
.filter(line => !line.hidden)
|
||||
.map(line => line.text)
|
||||
.join('\n');
|
||||
}
|
||||
rowToString(row, lines) {
|
||||
this.rasterize(row).forEach((rrow, r) => {
|
||||
let str = '';
|
||||
rrow.forEach((col, c) => {
|
||||
const { width } = row[c]; // the width with padding.
|
||||
const wrapWidth = this.negatePadding(row[c]); // the width without padding.
|
||||
let ts = col; // temporary string used during alignment/padding.
|
||||
if (wrapWidth > mixin.stringWidth(col)) {
|
||||
ts += ' '.repeat(wrapWidth - mixin.stringWidth(col));
|
||||
}
|
||||
// align the string within its column.
|
||||
if (row[c].align && row[c].align !== 'left' && this.wrap) {
|
||||
const fn = align[row[c].align];
|
||||
ts = fn(ts, wrapWidth);
|
||||
if (mixin.stringWidth(ts) < wrapWidth) {
|
||||
/* c8 ignore start */
|
||||
const w = width || 0;
|
||||
/* c8 ignore stop */
|
||||
ts += ' '.repeat(w - mixin.stringWidth(ts) - 1);
|
||||
}
|
||||
}
|
||||
// apply border and padding to string.
|
||||
const padding = row[c].padding || [0, 0, 0, 0];
|
||||
if (padding[left]) {
|
||||
str += ' '.repeat(padding[left]);
|
||||
}
|
||||
str += addBorder(row[c], ts, '| ');
|
||||
str += ts;
|
||||
str += addBorder(row[c], ts, ' |');
|
||||
if (padding[right]) {
|
||||
str += ' '.repeat(padding[right]);
|
||||
}
|
||||
// if prior row is span, try to render the
|
||||
// current row on the prior line.
|
||||
if (r === 0 && lines.length > 0) {
|
||||
str = this.renderInline(str, lines[lines.length - 1]);
|
||||
}
|
||||
});
|
||||
// remove trailing whitespace.
|
||||
lines.push({
|
||||
text: str.replace(/ +$/, ''),
|
||||
span: row.span
|
||||
});
|
||||
});
|
||||
return lines;
|
||||
}
|
||||
// if the full 'source' can render in
|
||||
// the target line, do so.
|
||||
renderInline(source, previousLine) {
|
||||
const match = source.match(/^ */);
|
||||
/* c8 ignore start */
|
||||
const leadingWhitespace = match ? match[0].length : 0;
|
||||
/* c8 ignore stop */
|
||||
const target = previousLine.text;
|
||||
const targetTextWidth = mixin.stringWidth(target.trimEnd());
|
||||
if (!previousLine.span) {
|
||||
return source;
|
||||
}
|
||||
// if we're not applying wrapping logic,
|
||||
// just always append to the span.
|
||||
if (!this.wrap) {
|
||||
previousLine.hidden = true;
|
||||
return target + source;
|
||||
}
|
||||
if (leadingWhitespace < targetTextWidth) {
|
||||
return source;
|
||||
}
|
||||
previousLine.hidden = true;
|
||||
return target.trimEnd() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimStart();
|
||||
}
|
||||
rasterize(row) {
|
||||
const rrows = [];
|
||||
const widths = this.columnWidths(row);
|
||||
let wrapped;
|
||||
// word wrap all columns, and create
|
||||
// a data-structure that is easy to rasterize.
|
||||
row.forEach((col, c) => {
|
||||
// leave room for left and right padding.
|
||||
col.width = widths[c];
|
||||
if (this.wrap) {
|
||||
wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n');
|
||||
}
|
||||
else {
|
||||
wrapped = col.text.split('\n');
|
||||
}
|
||||
if (col.border) {
|
||||
wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.');
|
||||
wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'");
|
||||
}
|
||||
// add top and bottom padding.
|
||||
if (col.padding) {
|
||||
wrapped.unshift(...new Array(col.padding[top] || 0).fill(''));
|
||||
wrapped.push(...new Array(col.padding[bottom] || 0).fill(''));
|
||||
}
|
||||
wrapped.forEach((str, r) => {
|
||||
if (!rrows[r]) {
|
||||
rrows.push([]);
|
||||
}
|
||||
const rrow = rrows[r];
|
||||
for (let i = 0; i < c; i++) {
|
||||
if (rrow[i] === undefined) {
|
||||
rrow.push('');
|
||||
}
|
||||
}
|
||||
rrow.push(str);
|
||||
});
|
||||
});
|
||||
return rrows;
|
||||
}
|
||||
negatePadding(col) {
|
||||
/* c8 ignore start */
|
||||
let wrapWidth = col.width || 0;
|
||||
/* c8 ignore stop */
|
||||
if (col.padding) {
|
||||
wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0);
|
||||
}
|
||||
if (col.border) {
|
||||
wrapWidth -= 4;
|
||||
}
|
||||
return wrapWidth;
|
||||
}
|
||||
columnWidths(row) {
|
||||
if (!this.wrap) {
|
||||
return row.map(col => {
|
||||
return col.width || mixin.stringWidth(col.text);
|
||||
});
|
||||
}
|
||||
let unset = row.length;
|
||||
let remainingWidth = this.width;
|
||||
// column widths can be set in config.
|
||||
const widths = row.map(col => {
|
||||
if (col.width) {
|
||||
unset--;
|
||||
remainingWidth -= col.width;
|
||||
return col.width;
|
||||
}
|
||||
return undefined;
|
||||
});
|
||||
// any unset widths should be calculated.
|
||||
/* c8 ignore start */
|
||||
const unsetWidth = unset ? Math.floor(remainingWidth / unset) : 0;
|
||||
/* c8 ignore stop */
|
||||
return widths.map((w, i) => {
|
||||
if (w === undefined) {
|
||||
return Math.max(unsetWidth, _minWidth(row[i]));
|
||||
}
|
||||
return w;
|
||||
});
|
||||
}
|
||||
}
|
||||
function addBorder(col, ts, style) {
|
||||
if (col.border) {
|
||||
if (/[.']-+[.']/.test(ts)) {
|
||||
return '';
|
||||
}
|
||||
if (ts.trim().length !== 0) {
|
||||
return style;
|
||||
}
|
||||
return ' ';
|
||||
}
|
||||
return '';
|
||||
}
|
||||
// calculates the minimum width of
|
||||
// a column, based on padding preferences.
|
||||
function _minWidth(col) {
|
||||
const padding = col.padding || [];
|
||||
const minWidth = 1 + (padding[left] || 0) + (padding[right] || 0);
|
||||
if (col.border) {
|
||||
return minWidth + 4;
|
||||
}
|
||||
return minWidth;
|
||||
}
|
||||
function getWindowWidth() {
|
||||
/* c8 ignore start */
|
||||
if (typeof process === 'object' && process.stdout && process.stdout.columns) {
|
||||
return process.stdout.columns;
|
||||
}
|
||||
return 80;
|
||||
}
|
||||
/* c8 ignore stop */
|
||||
function alignRight(str, width) {
|
||||
str = str.trim();
|
||||
const strWidth = mixin.stringWidth(str);
|
||||
if (strWidth < width) {
|
||||
return ' '.repeat(width - strWidth) + str;
|
||||
}
|
||||
return str;
|
||||
}
|
||||
function alignCenter(str, width) {
|
||||
str = str.trim();
|
||||
const strWidth = mixin.stringWidth(str);
|
||||
/* c8 ignore start */
|
||||
if (strWidth >= width) {
|
||||
return str;
|
||||
}
|
||||
/* c8 ignore stop */
|
||||
return ' '.repeat((width - strWidth) >> 1) + str;
|
||||
}
|
||||
let mixin;
|
||||
export function cliui(opts, _mixin) {
|
||||
mixin = _mixin;
|
||||
return new UI({
|
||||
/* c8 ignore start */
|
||||
width: (opts === null || opts === void 0 ? void 0 : opts.width) || getWindowWidth(),
|
||||
wrap: opts === null || opts === void 0 ? void 0 : opts.wrap
|
||||
/* c8 ignore stop */
|
||||
});
|
||||
}
|
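Not part of the commit: a minimal sketch of driving the tab/newline layout DSL that `shouldApplyLayoutDSL()`/`applyLayoutDSL()` above parse. The relative import path and the mixin stubs are assumptions; the real entry point (`index.mjs` below) injects `string-width`, `strip-ansi` and `wrap-ansi` instead.

```js
import { cliui } from './build/lib/index.js'  // assumed path, relative to the package root

const ui = cliui({ width: 40 }, {
  // stand-in mixin for the sketch only
  stringWidth: s => s.length,
  stripAnsi: s => s,
  wrap: s => s
})

// tabs separate columns, newlines separate rows
ui.div('  -h, --help\tShow help\n  -v, --version\tShow version')
console.log(ui.toString())
```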
node_modules/@isaacs/cliui/index.mjs (generated, vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
// Bootstrap cliui with ESM dependencies:
import { cliui } from './build/lib/index.js'

import stringWidth from 'string-width'
import stripAnsi from 'strip-ansi'
import wrap from 'wrap-ansi'

export default function ui (opts) {
  return cliui(opts, {
    stringWidth,
    stripAnsi,
    wrap
  })
}
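A hedged usage sketch of the `ui()` factory exported above; the two-column layout is illustrative, not taken from the commit, but the `padding` and `align` options mirror what `UI.div()` and `rowToString()` accept.

```js
import ui from '@isaacs/cliui'  // assumes the package is installed/resolvable

const layout = ui({ width: 60 })
layout.div(
  { text: 'Usage: app <command>', padding: [0, 2, 0, 0] },  // padding is [top, right, bottom, left]
  { text: 'v1.0.0', align: 'right' }
)
console.log(layout.toString())
```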
node_modules/@isaacs/cliui/package.json (generated, vendored, new file, 86 lines)
@@ -0,0 +1,86 @@
{
  "name": "@isaacs/cliui",
  "version": "8.0.2",
  "description": "easily create complex multi-column command-line-interfaces",
  "main": "build/index.cjs",
  "exports": {
    ".": [
      {
        "import": "./index.mjs",
        "require": "./build/index.cjs"
      },
      "./build/index.cjs"
    ]
  },
  "type": "module",
  "module": "./index.mjs",
  "scripts": {
    "check": "standardx '**/*.ts' && standardx '**/*.js' && standardx '**/*.cjs'",
    "fix": "standardx --fix '**/*.ts' && standardx --fix '**/*.js' && standardx --fix '**/*.cjs'",
    "pretest": "rimraf build && tsc -p tsconfig.test.json && cross-env NODE_ENV=test npm run build:cjs",
    "test": "c8 mocha ./test/*.cjs",
    "test:esm": "c8 mocha ./test/**/*.mjs",
    "postest": "check",
    "coverage": "c8 report --check-coverage",
    "precompile": "rimraf build",
    "compile": "tsc",
    "postcompile": "npm run build:cjs",
    "build:cjs": "rollup -c",
    "prepare": "npm run compile"
  },
  "repository": "yargs/cliui",
  "standard": {
    "ignore": [
      "**/example/**"
    ],
    "globals": [
      "it"
    ]
  },
  "keywords": [
    "cli",
    "command-line",
    "layout",
    "design",
    "console",
    "wrap",
    "table"
  ],
  "author": "Ben Coe <ben@npmjs.com>",
  "license": "ISC",
  "dependencies": {
    "string-width": "^5.1.2",
    "string-width-cjs": "npm:string-width@^4.2.0",
    "strip-ansi": "^7.0.1",
    "strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
    "wrap-ansi": "^8.1.0",
    "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
  },
  "devDependencies": {
    "@types/node": "^14.0.27",
    "@typescript-eslint/eslint-plugin": "^4.0.0",
    "@typescript-eslint/parser": "^4.0.0",
    "c8": "^7.3.0",
    "chai": "^4.2.0",
    "chalk": "^4.1.0",
    "cross-env": "^7.0.2",
    "eslint": "^7.6.0",
    "eslint-plugin-import": "^2.22.0",
    "eslint-plugin-node": "^11.1.0",
    "gts": "^3.0.0",
    "mocha": "^10.0.0",
    "rimraf": "^3.0.2",
    "rollup": "^2.23.1",
    "rollup-plugin-ts": "^3.0.2",
    "standardx": "^7.0.0",
    "typescript": "^4.0.0"
  },
  "files": [
    "build",
    "index.mjs",
    "!*.d.ts"
  ],
  "engines": {
    "node": ">=12"
  }
}
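As a sketch of what the `exports` map above does (assuming the package is installed): ESM consumers resolve to `index.mjs`, CommonJS consumers to the Rollup-built `build/index.cjs`.

```js
import { createRequire } from 'node:module'
import esmUi from '@isaacs/cliui'        // "import" condition -> ./index.mjs

const require = createRequire(import.meta.url)
const cjsUi = require('@isaacs/cliui')   // "require" condition -> ./build/index.cjs
```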
node_modules/@jridgewell/sourcemap-codec/LICENSE (generated, vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
The MIT License

Copyright (c) 2015 Rich Harris

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
node_modules/@jridgewell/sourcemap-codec/README.md (generated, vendored, new file, 264 lines)
@@ -0,0 +1,264 @@
|
||||
# @jridgewell/sourcemap-codec
|
||||
|
||||
Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit).
|
||||
|
||||
|
||||
## Why?
|
||||
|
||||
Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap.
|
||||
|
||||
This package makes the process slightly easier.
|
||||
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
npm install @jridgewell/sourcemap-codec
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
||||
|
||||
var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
|
||||
|
||||
assert.deepEqual( decoded, [
|
||||
// the first line (of the generated code) has no mappings,
|
||||
// as shown by the starting semi-colon (which separates lines)
|
||||
[],
|
||||
|
||||
// the second line contains four (comma-separated) segments
|
||||
[
|
||||
// segments are encoded as you'd expect:
|
||||
// [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ]
|
||||
|
||||
// i.e. the first segment begins at column 2, and maps back to the second column
|
||||
// of the second line (both zero-based) of the 0th source, and uses the 0th
|
||||
// name in the `map.names` array
|
||||
[ 2, 0, 2, 2, 0 ],
|
||||
|
||||
// the remaining segments are 4-length rather than 5-length,
|
||||
// because they don't map a name
|
||||
[ 4, 0, 2, 4 ],
|
||||
[ 6, 0, 2, 5 ],
|
||||
[ 7, 0, 2, 7 ]
|
||||
],
|
||||
|
||||
// the final line contains two segments
|
||||
[
|
||||
[ 2, 1, 10, 19 ],
|
||||
[ 12, 1, 11, 20 ]
|
||||
]
|
||||
]);
|
||||
|
||||
var encoded = encode( decoded );
|
||||
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
|
||||
```
|
||||
|
||||
## Benchmarks
|
||||
|
||||
```
|
||||
node v20.10.0
|
||||
|
||||
amp.js.map - 45120 segments
|
||||
|
||||
Decode Memory Usage:
|
||||
local code 5815135 bytes
|
||||
@jridgewell/sourcemap-codec 1.4.15 5868160 bytes
|
||||
sourcemap-codec 5492584 bytes
|
||||
source-map-0.6.1 13569984 bytes
|
||||
source-map-0.8.0 6390584 bytes
|
||||
chrome dev tools 8011136 bytes
|
||||
Smallest memory usage is sourcemap-codec
|
||||
|
||||
Decode speed:
|
||||
decode: local code x 492 ops/sec ±1.22% (90 runs sampled)
|
||||
decode: @jridgewell/sourcemap-codec 1.4.15 x 499 ops/sec ±1.16% (89 runs sampled)
|
||||
decode: sourcemap-codec x 376 ops/sec ±1.66% (89 runs sampled)
|
||||
decode: source-map-0.6.1 x 34.99 ops/sec ±0.94% (48 runs sampled)
|
||||
decode: source-map-0.8.0 x 351 ops/sec ±0.07% (95 runs sampled)
|
||||
chrome dev tools x 165 ops/sec ±0.91% (86 runs sampled)
|
||||
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15
|
||||
|
||||
Encode Memory Usage:
|
||||
local code 444248 bytes
|
||||
@jridgewell/sourcemap-codec 1.4.15 623024 bytes
|
||||
sourcemap-codec 8696280 bytes
|
||||
source-map-0.6.1 8745176 bytes
|
||||
source-map-0.8.0 8736624 bytes
|
||||
Smallest memory usage is local code
|
||||
|
||||
Encode speed:
|
||||
encode: local code x 796 ops/sec ±0.11% (97 runs sampled)
|
||||
encode: @jridgewell/sourcemap-codec 1.4.15 x 795 ops/sec ±0.25% (98 runs sampled)
|
||||
encode: sourcemap-codec x 231 ops/sec ±0.83% (86 runs sampled)
|
||||
encode: source-map-0.6.1 x 166 ops/sec ±0.57% (86 runs sampled)
|
||||
encode: source-map-0.8.0 x 203 ops/sec ±0.45% (88 runs sampled)
|
||||
Fastest is encode: local code,encode: @jridgewell/sourcemap-codec 1.4.15
|
||||
|
||||
|
||||
***
|
||||
|
||||
|
||||
babel.min.js.map - 347793 segments
|
||||
|
||||
Decode Memory Usage:
|
||||
local code 35424960 bytes
|
||||
@jridgewell/sourcemap-codec 1.4.15 35424696 bytes
|
||||
sourcemap-codec 36033464 bytes
|
||||
source-map-0.6.1 62253704 bytes
|
||||
source-map-0.8.0 43843920 bytes
|
||||
chrome dev tools 45111400 bytes
|
||||
Smallest memory usage is @jridgewell/sourcemap-codec 1.4.15
|
||||
|
||||
Decode speed:
|
||||
decode: local code x 38.18 ops/sec ±5.44% (52 runs sampled)
|
||||
decode: @jridgewell/sourcemap-codec 1.4.15 x 38.36 ops/sec ±5.02% (52 runs sampled)
|
||||
decode: sourcemap-codec x 34.05 ops/sec ±4.45% (47 runs sampled)
|
||||
decode: source-map-0.6.1 x 4.31 ops/sec ±2.76% (15 runs sampled)
|
||||
decode: source-map-0.8.0 x 55.60 ops/sec ±0.13% (73 runs sampled)
|
||||
chrome dev tools x 16.94 ops/sec ±3.78% (46 runs sampled)
|
||||
Fastest is decode: source-map-0.8.0
|
||||
|
||||
Encode Memory Usage:
|
||||
local code 2606016 bytes
|
||||
@jridgewell/sourcemap-codec 1.4.15 2626440 bytes
|
||||
sourcemap-codec 21152576 bytes
|
||||
source-map-0.6.1 25023928 bytes
|
||||
source-map-0.8.0 25256448 bytes
|
||||
Smallest memory usage is local code
|
||||
|
||||
Encode speed:
|
||||
encode: local code x 127 ops/sec ±0.18% (83 runs sampled)
|
||||
encode: @jridgewell/sourcemap-codec 1.4.15 x 128 ops/sec ±0.26% (83 runs sampled)
|
||||
encode: sourcemap-codec x 29.31 ops/sec ±2.55% (53 runs sampled)
|
||||
encode: source-map-0.6.1 x 18.85 ops/sec ±3.19% (36 runs sampled)
|
||||
encode: source-map-0.8.0 x 19.34 ops/sec ±1.97% (36 runs sampled)
|
||||
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15
|
||||
|
||||
|
||||
***
|
||||
|
||||
|
||||
preact.js.map - 1992 segments
|
||||
|
||||
Decode Memory Usage:
|
||||
local code 261696 bytes
|
||||
@jridgewell/sourcemap-codec 1.4.15 244296 bytes
|
||||
sourcemap-codec 302816 bytes
|
||||
source-map-0.6.1 939176 bytes
|
||||
source-map-0.8.0 336 bytes
|
||||
chrome dev tools 587368 bytes
|
||||
Smallest memory usage is source-map-0.8.0
|
||||
|
||||
Decode speed:
|
||||
decode: local code x 17,782 ops/sec ±0.32% (97 runs sampled)
|
||||
decode: @jridgewell/sourcemap-codec 1.4.15 x 17,863 ops/sec ±0.40% (100 runs sampled)
|
||||
decode: sourcemap-codec x 12,453 ops/sec ±0.27% (101 runs sampled)
|
||||
decode: source-map-0.6.1 x 1,288 ops/sec ±1.05% (96 runs sampled)
|
||||
decode: source-map-0.8.0 x 9,289 ops/sec ±0.27% (101 runs sampled)
|
||||
chrome dev tools x 4,769 ops/sec ±0.18% (100 runs sampled)
|
||||
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15
|
||||
|
||||
Encode Memory Usage:
|
||||
local code 262944 bytes
|
||||
@jridgewell/sourcemap-codec 1.4.15 25544 bytes
|
||||
sourcemap-codec 323048 bytes
|
||||
source-map-0.6.1 507808 bytes
|
||||
source-map-0.8.0 507480 bytes
|
||||
Smallest memory usage is @jridgewell/sourcemap-codec 1.4.15
|
||||
|
||||
Encode speed:
|
||||
encode: local code x 24,207 ops/sec ±0.79% (95 runs sampled)
|
||||
encode: @jridgewell/sourcemap-codec 1.4.15 x 24,288 ops/sec ±0.48% (96 runs sampled)
|
||||
encode: sourcemap-codec x 6,761 ops/sec ±0.21% (100 runs sampled)
|
||||
encode: source-map-0.6.1 x 5,374 ops/sec ±0.17% (99 runs sampled)
|
||||
encode: source-map-0.8.0 x 5,633 ops/sec ±0.32% (99 runs sampled)
|
||||
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15,encode: local code
|
||||
|
||||
|
||||
***
|
||||
|
||||
|
||||
react.js.map - 5726 segments
|
||||
|
||||
Decode Memory Usage:
|
||||
local code 678816 bytes
|
||||
@jridgewell/sourcemap-codec 1.4.15 678816 bytes
|
||||
sourcemap-codec 816400 bytes
|
||||
source-map-0.6.1 2288864 bytes
|
||||
source-map-0.8.0 721360 bytes
|
||||
chrome dev tools 1012512 bytes
|
||||
Smallest memory usage is local code
|
||||
|
||||
Decode speed:
|
||||
decode: local code x 6,178 ops/sec ±0.19% (98 runs sampled)
|
||||
decode: @jridgewell/sourcemap-codec 1.4.15 x 6,261 ops/sec ±0.22% (100 runs sampled)
|
||||
decode: sourcemap-codec x 4,472 ops/sec ±0.90% (99 runs sampled)
|
||||
decode: source-map-0.6.1 x 449 ops/sec ±0.31% (95 runs sampled)
|
||||
decode: source-map-0.8.0 x 3,219 ops/sec ±0.13% (100 runs sampled)
|
||||
chrome dev tools x 1,743 ops/sec ±0.20% (99 runs sampled)
|
||||
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15
|
||||
|
||||
Encode Memory Usage:
|
||||
local code 140960 bytes
|
||||
@jridgewell/sourcemap-codec 1.4.15 159808 bytes
|
||||
sourcemap-codec 969304 bytes
|
||||
source-map-0.6.1 930520 bytes
|
||||
source-map-0.8.0 930248 bytes
|
||||
Smallest memory usage is local code
|
||||
|
||||
Encode speed:
|
||||
encode: local code x 8,013 ops/sec ±0.19% (100 runs sampled)
|
||||
encode: @jridgewell/sourcemap-codec 1.4.15 x 7,989 ops/sec ±0.20% (101 runs sampled)
|
||||
encode: sourcemap-codec x 2,472 ops/sec ±0.21% (99 runs sampled)
|
||||
encode: source-map-0.6.1 x 2,200 ops/sec ±0.17% (99 runs sampled)
|
||||
encode: source-map-0.8.0 x 2,220 ops/sec ±0.37% (99 runs sampled)
|
||||
Fastest is encode: local code
|
||||
|
||||
|
||||
***
|
||||
|
||||
|
||||
vscode.map - 2141001 segments
|
||||
|
||||
Decode Memory Usage:
|
||||
local code 198955264 bytes
|
||||
@jridgewell/sourcemap-codec 1.4.15 199175352 bytes
|
||||
sourcemap-codec 199102688 bytes
|
||||
source-map-0.6.1 386323432 bytes
|
||||
source-map-0.8.0 244116432 bytes
|
||||
chrome dev tools 293734280 bytes
|
||||
Smallest memory usage is local code
|
||||
|
||||
Decode speed:
|
||||
decode: local code x 3.90 ops/sec ±22.21% (15 runs sampled)
|
||||
decode: @jridgewell/sourcemap-codec 1.4.15 x 3.95 ops/sec ±23.53% (15 runs sampled)
|
||||
decode: sourcemap-codec x 3.82 ops/sec ±17.94% (14 runs sampled)
|
||||
decode: source-map-0.6.1 x 0.61 ops/sec ±7.81% (6 runs sampled)
|
||||
decode: source-map-0.8.0 x 9.54 ops/sec ±0.28% (28 runs sampled)
|
||||
chrome dev tools x 2.18 ops/sec ±10.58% (10 runs sampled)
|
||||
Fastest is decode: source-map-0.8.0
|
||||
|
||||
Encode Memory Usage:
|
||||
local code 13509880 bytes
|
||||
@jridgewell/sourcemap-codec 1.4.15 13537648 bytes
|
||||
sourcemap-codec 32540104 bytes
|
||||
source-map-0.6.1 127531040 bytes
|
||||
source-map-0.8.0 127535312 bytes
|
||||
Smallest memory usage is local code
|
||||
|
||||
Encode speed:
|
||||
encode: local code x 20.10 ops/sec ±0.19% (38 runs sampled)
|
||||
encode: @jridgewell/sourcemap-codec 1.4.15 x 20.26 ops/sec ±0.32% (38 runs sampled)
|
||||
encode: sourcemap-codec x 5.44 ops/sec ±1.64% (18 runs sampled)
|
||||
encode: source-map-0.6.1 x 2.30 ops/sec ±4.79% (10 runs sampled)
|
||||
encode: source-map-0.8.0 x 2.46 ops/sec ±6.53% (10 runs sampled)
|
||||
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15
|
||||
```
|
||||
|
||||
# License
|
||||
|
||||
MIT
|
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs (generated, vendored, new file, 424 lines)
@@ -0,0 +1,424 @@
|
||||
const comma = ','.charCodeAt(0);
|
||||
const semicolon = ';'.charCodeAt(0);
|
||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||||
const intToChar = new Uint8Array(64); // 64 possible chars.
|
||||
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
||||
for (let i = 0; i < chars.length; i++) {
|
||||
const c = chars.charCodeAt(i);
|
||||
intToChar[i] = c;
|
||||
charToInt[c] = i;
|
||||
}
|
||||
function decodeInteger(reader, relative) {
|
||||
let value = 0;
|
||||
let shift = 0;
|
||||
let integer = 0;
|
||||
do {
|
||||
const c = reader.next();
|
||||
integer = charToInt[c];
|
||||
value |= (integer & 31) << shift;
|
||||
shift += 5;
|
||||
} while (integer & 32);
|
||||
const shouldNegate = value & 1;
|
||||
value >>>= 1;
|
||||
if (shouldNegate) {
|
||||
value = -0x80000000 | -value;
|
||||
}
|
||||
return relative + value;
|
||||
}
|
||||
function encodeInteger(builder, num, relative) {
|
||||
let delta = num - relative;
|
||||
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
|
||||
do {
|
||||
let clamped = delta & 0b011111;
|
||||
delta >>>= 5;
|
||||
if (delta > 0)
|
||||
clamped |= 0b100000;
|
||||
builder.write(intToChar[clamped]);
|
||||
} while (delta > 0);
|
||||
return num;
|
||||
}
|
||||
function hasMoreVlq(reader, max) {
|
||||
if (reader.pos >= max)
|
||||
return false;
|
||||
return reader.peek() !== comma;
|
||||
}
|
||||
|
||||
const bufLength = 1024 * 16;
|
||||
// Provide a fallback for older environments.
|
||||
const td = typeof TextDecoder !== 'undefined'
|
||||
? /* #__PURE__ */ new TextDecoder()
|
||||
: typeof Buffer !== 'undefined'
|
||||
? {
|
||||
decode(buf) {
|
||||
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||
return out.toString();
|
||||
},
|
||||
}
|
||||
: {
|
||||
decode(buf) {
|
||||
let out = '';
|
||||
for (let i = 0; i < buf.length; i++) {
|
||||
out += String.fromCharCode(buf[i]);
|
||||
}
|
||||
return out;
|
||||
},
|
||||
};
|
||||
class StringWriter {
|
||||
constructor() {
|
||||
this.pos = 0;
|
||||
this.out = '';
|
||||
this.buffer = new Uint8Array(bufLength);
|
||||
}
|
||||
write(v) {
|
||||
const { buffer } = this;
|
||||
buffer[this.pos++] = v;
|
||||
if (this.pos === bufLength) {
|
||||
this.out += td.decode(buffer);
|
||||
this.pos = 0;
|
||||
}
|
||||
}
|
||||
flush() {
|
||||
const { buffer, out, pos } = this;
|
||||
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
|
||||
}
|
||||
}
|
||||
class StringReader {
|
||||
constructor(buffer) {
|
||||
this.pos = 0;
|
||||
this.buffer = buffer;
|
||||
}
|
||||
next() {
|
||||
return this.buffer.charCodeAt(this.pos++);
|
||||
}
|
||||
peek() {
|
||||
return this.buffer.charCodeAt(this.pos);
|
||||
}
|
||||
indexOf(char) {
|
||||
const { buffer, pos } = this;
|
||||
const idx = buffer.indexOf(char, pos);
|
||||
return idx === -1 ? buffer.length : idx;
|
||||
}
|
||||
}
|
||||
|
||||
const EMPTY = [];
|
||||
function decodeOriginalScopes(input) {
|
||||
const { length } = input;
|
||||
const reader = new StringReader(input);
|
||||
const scopes = [];
|
||||
const stack = [];
|
||||
let line = 0;
|
||||
for (; reader.pos < length; reader.pos++) {
|
||||
line = decodeInteger(reader, line);
|
||||
const column = decodeInteger(reader, 0);
|
||||
if (!hasMoreVlq(reader, length)) {
|
||||
const last = stack.pop();
|
||||
last[2] = line;
|
||||
last[3] = column;
|
||||
continue;
|
||||
}
|
||||
const kind = decodeInteger(reader, 0);
|
||||
const fields = decodeInteger(reader, 0);
|
||||
const hasName = fields & 0b0001;
|
||||
const scope = (hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]);
|
||||
let vars = EMPTY;
|
||||
if (hasMoreVlq(reader, length)) {
|
||||
vars = [];
|
||||
do {
|
||||
const varsIndex = decodeInteger(reader, 0);
|
||||
vars.push(varsIndex);
|
||||
} while (hasMoreVlq(reader, length));
|
||||
}
|
||||
scope.vars = vars;
|
||||
scopes.push(scope);
|
||||
stack.push(scope);
|
||||
}
|
||||
return scopes;
|
||||
}
|
||||
function encodeOriginalScopes(scopes) {
|
||||
const writer = new StringWriter();
|
||||
for (let i = 0; i < scopes.length;) {
|
||||
i = _encodeOriginalScopes(scopes, i, writer, [0]);
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
function _encodeOriginalScopes(scopes, index, writer, state) {
|
||||
const scope = scopes[index];
|
||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
|
||||
if (index > 0)
|
||||
writer.write(comma);
|
||||
state[0] = encodeInteger(writer, startLine, state[0]);
|
||||
encodeInteger(writer, startColumn, 0);
|
||||
encodeInteger(writer, kind, 0);
|
||||
const fields = scope.length === 6 ? 0b0001 : 0;
|
||||
encodeInteger(writer, fields, 0);
|
||||
if (scope.length === 6)
|
||||
encodeInteger(writer, scope[5], 0);
|
||||
for (const v of vars) {
|
||||
encodeInteger(writer, v, 0);
|
||||
}
|
||||
for (index++; index < scopes.length;) {
|
||||
const next = scopes[index];
|
||||
const { 0: l, 1: c } = next;
|
||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
||||
break;
|
||||
}
|
||||
index = _encodeOriginalScopes(scopes, index, writer, state);
|
||||
}
|
||||
writer.write(comma);
|
||||
state[0] = encodeInteger(writer, endLine, state[0]);
|
||||
encodeInteger(writer, endColumn, 0);
|
||||
return index;
|
||||
}
|
||||
function decodeGeneratedRanges(input) {
|
||||
const { length } = input;
|
||||
const reader = new StringReader(input);
|
||||
const ranges = [];
|
||||
const stack = [];
|
||||
let genLine = 0;
|
||||
let definitionSourcesIndex = 0;
|
||||
let definitionScopeIndex = 0;
|
||||
let callsiteSourcesIndex = 0;
|
||||
let callsiteLine = 0;
|
||||
let callsiteColumn = 0;
|
||||
let bindingLine = 0;
|
||||
let bindingColumn = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
let genColumn = 0;
|
||||
for (; reader.pos < semi; reader.pos++) {
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (!hasMoreVlq(reader, semi)) {
|
||||
const last = stack.pop();
|
||||
last[2] = genLine;
|
||||
last[3] = genColumn;
|
||||
continue;
|
||||
}
|
||||
const fields = decodeInteger(reader, 0);
|
||||
const hasDefinition = fields & 0b0001;
|
||||
const hasCallsite = fields & 0b0010;
|
||||
const hasScope = fields & 0b0100;
|
||||
let callsite = null;
|
||||
let bindings = EMPTY;
|
||||
let range;
|
||||
if (hasDefinition) {
|
||||
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
|
||||
definitionScopeIndex = decodeInteger(reader, definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0);
|
||||
definitionSourcesIndex = defSourcesIndex;
|
||||
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
|
||||
}
|
||||
else {
|
||||
range = [genLine, genColumn, 0, 0];
|
||||
}
|
||||
range.isScope = !!hasScope;
|
||||
if (hasCallsite) {
|
||||
const prevCsi = callsiteSourcesIndex;
|
||||
const prevLine = callsiteLine;
|
||||
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
|
||||
const sameSource = prevCsi === callsiteSourcesIndex;
|
||||
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
|
||||
callsiteColumn = decodeInteger(reader, sameSource && prevLine === callsiteLine ? callsiteColumn : 0);
|
||||
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
|
||||
}
|
||||
range.callsite = callsite;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
bindings = [];
|
||||
do {
|
||||
bindingLine = genLine;
|
||||
bindingColumn = genColumn;
|
||||
const expressionsCount = decodeInteger(reader, 0);
|
||||
let expressionRanges;
|
||||
if (expressionsCount < -1) {
|
||||
expressionRanges = [[decodeInteger(reader, 0)]];
|
||||
for (let i = -1; i > expressionsCount; i--) {
|
||||
const prevBl = bindingLine;
|
||||
bindingLine = decodeInteger(reader, bindingLine);
|
||||
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
|
||||
const expression = decodeInteger(reader, 0);
|
||||
expressionRanges.push([expression, bindingLine, bindingColumn]);
|
||||
}
|
||||
}
|
||||
else {
|
||||
expressionRanges = [[expressionsCount]];
|
||||
}
|
||||
bindings.push(expressionRanges);
|
||||
} while (hasMoreVlq(reader, semi));
|
||||
}
|
||||
range.bindings = bindings;
|
||||
ranges.push(range);
|
||||
stack.push(range);
|
||||
}
|
||||
genLine++;
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos < length);
|
||||
return ranges;
|
||||
}
|
||||
function encodeGeneratedRanges(ranges) {
|
||||
if (ranges.length === 0)
|
||||
return '';
|
||||
const writer = new StringWriter();
|
||||
for (let i = 0; i < ranges.length;) {
|
||||
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
function _encodeGeneratedRanges(ranges, index, writer, state) {
|
||||
const range = ranges[index];
|
||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, isScope, callsite, bindings, } = range;
|
||||
if (state[0] < startLine) {
|
||||
catchupLine(writer, state[0], startLine);
|
||||
state[0] = startLine;
|
||||
state[1] = 0;
|
||||
}
|
||||
else if (index > 0) {
|
||||
writer.write(comma);
|
||||
}
|
||||
state[1] = encodeInteger(writer, range[1], state[1]);
|
||||
const fields = (range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
|
||||
encodeInteger(writer, fields, 0);
|
||||
if (range.length === 6) {
|
||||
const { 4: sourcesIndex, 5: scopesIndex } = range;
|
||||
if (sourcesIndex !== state[2]) {
|
||||
state[3] = 0;
|
||||
}
|
||||
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
|
||||
state[3] = encodeInteger(writer, scopesIndex, state[3]);
|
||||
}
|
||||
if (callsite) {
|
||||
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
|
||||
if (sourcesIndex !== state[4]) {
|
||||
state[5] = 0;
|
||||
state[6] = 0;
|
||||
}
|
||||
else if (callLine !== state[5]) {
|
||||
state[6] = 0;
|
||||
}
|
||||
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
|
||||
state[5] = encodeInteger(writer, callLine, state[5]);
|
||||
state[6] = encodeInteger(writer, callColumn, state[6]);
|
||||
}
|
||||
if (bindings) {
|
||||
for (const binding of bindings) {
|
||||
if (binding.length > 1)
|
||||
encodeInteger(writer, -binding.length, 0);
|
||||
const expression = binding[0][0];
|
||||
encodeInteger(writer, expression, 0);
|
||||
let bindingStartLine = startLine;
|
||||
let bindingStartColumn = startColumn;
|
||||
for (let i = 1; i < binding.length; i++) {
|
||||
const expRange = binding[i];
|
||||
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
|
||||
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
|
||||
encodeInteger(writer, expRange[0], 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (index++; index < ranges.length;) {
|
||||
const next = ranges[index];
|
||||
const { 0: l, 1: c } = next;
|
||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
||||
break;
|
||||
}
|
||||
index = _encodeGeneratedRanges(ranges, index, writer, state);
|
||||
}
|
||||
if (state[0] < endLine) {
|
||||
catchupLine(writer, state[0], endLine);
|
||||
state[0] = endLine;
|
||||
state[1] = 0;
|
||||
}
|
||||
else {
|
||||
writer.write(comma);
|
||||
}
|
||||
state[1] = encodeInteger(writer, endColumn, state[1]);
|
||||
return index;
|
||||
}
|
||||
function catchupLine(writer, lastLine, line) {
|
||||
do {
|
||||
writer.write(semicolon);
|
||||
} while (++lastLine < line);
|
||||
}
|
||||
|
||||
function decode(mappings) {
|
||||
const { length } = mappings;
|
||||
const reader = new StringReader(mappings);
|
||||
const decoded = [];
|
||||
let genColumn = 0;
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
const line = [];
|
||||
let sorted = true;
|
||||
let lastCol = 0;
|
||||
genColumn = 0;
|
||||
while (reader.pos < semi) {
|
||||
let seg;
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (genColumn < lastCol)
|
||||
sorted = false;
|
||||
lastCol = genColumn;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
sourcesIndex = decodeInteger(reader, sourcesIndex);
|
||||
sourceLine = decodeInteger(reader, sourceLine);
|
||||
sourceColumn = decodeInteger(reader, sourceColumn);
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
namesIndex = decodeInteger(reader, namesIndex);
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
|
||||
}
|
||||
else {
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
|
||||
}
|
||||
}
|
||||
else {
|
||||
seg = [genColumn];
|
||||
}
|
||||
line.push(seg);
|
||||
reader.pos++;
|
||||
}
|
||||
if (!sorted)
|
||||
sort(line);
|
||||
decoded.push(line);
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos <= length);
|
||||
return decoded;
|
||||
}
|
||||
function sort(line) {
|
||||
line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[0] - b[0];
|
||||
}
|
||||
function encode(decoded) {
|
||||
const writer = new StringWriter();
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
if (i > 0)
|
||||
writer.write(semicolon);
|
||||
if (line.length === 0)
|
||||
continue;
|
||||
let genColumn = 0;
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const segment = line[j];
|
||||
if (j > 0)
|
||||
writer.write(comma);
|
||||
genColumn = encodeInteger(writer, segment[0], genColumn);
|
||||
if (segment.length === 1)
|
||||
continue;
|
||||
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
|
||||
sourceLine = encodeInteger(writer, segment[2], sourceLine);
|
||||
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
|
||||
if (segment.length === 4)
|
||||
continue;
|
||||
namesIndex = encodeInteger(writer, segment[4], namesIndex);
|
||||
}
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
|
||||
export { decode, decodeGeneratedRanges, decodeOriginalScopes, encode, encodeGeneratedRanges, encodeOriginalScopes };
|
||||
//# sourceMappingURL=sourcemap-codec.mjs.map
|
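A small round-trip sketch of the `decode`/`encode` pair exported above, reusing the mappings string from the package README earlier in this commit; it assumes the installed package resolves to this build.

```js
import { decode, encode } from '@jridgewell/sourcemap-codec'

const decoded = decode(';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC')
console.log(decoded[1][0])   // [2, 0, 2, 2, 0] — column 2 maps to source 0, line 2, column 2, name 0
console.log(encode(decoded)) // ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC'
```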
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map (generated, vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js (generated, vendored, new file, 439 lines)
@@ -0,0 +1,439 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
||||
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
|
||||
})(this, (function (exports) { 'use strict';
|
||||
|
||||
const comma = ','.charCodeAt(0);
|
||||
const semicolon = ';'.charCodeAt(0);
|
||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||||
const intToChar = new Uint8Array(64); // 64 possible chars.
|
||||
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
||||
for (let i = 0; i < chars.length; i++) {
|
||||
const c = chars.charCodeAt(i);
|
||||
intToChar[i] = c;
|
||||
charToInt[c] = i;
|
||||
}
|
||||
function decodeInteger(reader, relative) {
|
||||
let value = 0;
|
||||
let shift = 0;
|
||||
let integer = 0;
|
||||
do {
|
||||
const c = reader.next();
|
||||
integer = charToInt[c];
|
||||
value |= (integer & 31) << shift;
|
||||
shift += 5;
|
||||
} while (integer & 32);
|
||||
const shouldNegate = value & 1;
|
||||
value >>>= 1;
|
||||
if (shouldNegate) {
|
||||
value = -0x80000000 | -value;
|
||||
}
|
||||
return relative + value;
|
||||
}
|
||||
function encodeInteger(builder, num, relative) {
|
||||
let delta = num - relative;
|
||||
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
|
||||
do {
|
||||
let clamped = delta & 0b011111;
|
||||
delta >>>= 5;
|
||||
if (delta > 0)
|
||||
clamped |= 0b100000;
|
||||
builder.write(intToChar[clamped]);
|
||||
} while (delta > 0);
|
||||
return num;
|
||||
}
|
||||
function hasMoreVlq(reader, max) {
|
||||
if (reader.pos >= max)
|
||||
return false;
|
||||
return reader.peek() !== comma;
|
||||
}
|
||||
|
||||
const bufLength = 1024 * 16;
|
||||
// Provide a fallback for older environments.
|
||||
const td = typeof TextDecoder !== 'undefined'
|
||||
? /* #__PURE__ */ new TextDecoder()
|
||||
: typeof Buffer !== 'undefined'
|
||||
? {
|
||||
decode(buf) {
|
||||
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||
return out.toString();
|
||||
},
|
||||
}
|
||||
: {
|
||||
decode(buf) {
|
||||
let out = '';
|
||||
for (let i = 0; i < buf.length; i++) {
|
||||
out += String.fromCharCode(buf[i]);
|
||||
}
|
||||
return out;
|
||||
},
|
||||
};
|
||||
class StringWriter {
|
||||
constructor() {
|
||||
this.pos = 0;
|
||||
this.out = '';
|
||||
this.buffer = new Uint8Array(bufLength);
|
||||
}
|
||||
write(v) {
|
||||
const { buffer } = this;
|
||||
buffer[this.pos++] = v;
|
||||
if (this.pos === bufLength) {
|
||||
this.out += td.decode(buffer);
|
||||
this.pos = 0;
|
||||
}
|
||||
}
|
||||
flush() {
|
||||
const { buffer, out, pos } = this;
|
||||
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
|
||||
}
|
||||
}
|
||||
class StringReader {
|
||||
constructor(buffer) {
|
||||
this.pos = 0;
|
||||
this.buffer = buffer;
|
||||
}
|
||||
next() {
|
||||
return this.buffer.charCodeAt(this.pos++);
|
||||
}
|
||||
peek() {
|
||||
return this.buffer.charCodeAt(this.pos);
|
||||
}
|
||||
indexOf(char) {
|
||||
const { buffer, pos } = this;
|
||||
const idx = buffer.indexOf(char, pos);
|
||||
return idx === -1 ? buffer.length : idx;
|
||||
}
|
||||
}
|
||||
|
||||
const EMPTY = [];
|
||||
function decodeOriginalScopes(input) {
|
||||
const { length } = input;
|
||||
const reader = new StringReader(input);
|
||||
const scopes = [];
|
||||
const stack = [];
|
||||
let line = 0;
|
||||
for (; reader.pos < length; reader.pos++) {
|
||||
line = decodeInteger(reader, line);
|
||||
const column = decodeInteger(reader, 0);
|
||||
if (!hasMoreVlq(reader, length)) {
|
||||
const last = stack.pop();
|
||||
last[2] = line;
|
||||
last[3] = column;
|
||||
continue;
|
||||
}
|
||||
const kind = decodeInteger(reader, 0);
|
||||
const fields = decodeInteger(reader, 0);
|
||||
const hasName = fields & 0b0001;
|
||||
const scope = (hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]);
|
||||
let vars = EMPTY;
|
||||
if (hasMoreVlq(reader, length)) {
|
||||
vars = [];
|
||||
do {
|
||||
const varsIndex = decodeInteger(reader, 0);
|
||||
vars.push(varsIndex);
|
||||
} while (hasMoreVlq(reader, length));
|
||||
}
|
||||
scope.vars = vars;
|
||||
scopes.push(scope);
|
||||
stack.push(scope);
|
||||
}
|
||||
return scopes;
|
||||
}
|
||||
function encodeOriginalScopes(scopes) {
|
||||
const writer = new StringWriter();
|
||||
for (let i = 0; i < scopes.length;) {
|
||||
i = _encodeOriginalScopes(scopes, i, writer, [0]);
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
function _encodeOriginalScopes(scopes, index, writer, state) {
|
||||
const scope = scopes[index];
|
||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
|
||||
if (index > 0)
|
||||
writer.write(comma);
|
||||
state[0] = encodeInteger(writer, startLine, state[0]);
|
||||
encodeInteger(writer, startColumn, 0);
|
||||
encodeInteger(writer, kind, 0);
|
||||
const fields = scope.length === 6 ? 0b0001 : 0;
|
||||
encodeInteger(writer, fields, 0);
|
||||
if (scope.length === 6)
|
||||
encodeInteger(writer, scope[5], 0);
|
||||
for (const v of vars) {
|
||||
encodeInteger(writer, v, 0);
|
||||
}
|
||||
for (index++; index < scopes.length;) {
|
||||
const next = scopes[index];
|
||||
const { 0: l, 1: c } = next;
|
||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
||||
break;
|
||||
}
|
||||
index = _encodeOriginalScopes(scopes, index, writer, state);
|
||||
}
|
||||
writer.write(comma);
|
||||
state[0] = encodeInteger(writer, endLine, state[0]);
|
||||
encodeInteger(writer, endColumn, 0);
|
||||
return index;
|
||||
}
|
||||
function decodeGeneratedRanges(input) {
|
||||
const { length } = input;
|
||||
const reader = new StringReader(input);
|
||||
const ranges = [];
|
||||
const stack = [];
|
||||
let genLine = 0;
|
||||
let definitionSourcesIndex = 0;
|
||||
let definitionScopeIndex = 0;
|
||||
let callsiteSourcesIndex = 0;
|
||||
let callsiteLine = 0;
|
||||
let callsiteColumn = 0;
|
||||
let bindingLine = 0;
|
||||
let bindingColumn = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
let genColumn = 0;
|
||||
for (; reader.pos < semi; reader.pos++) {
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (!hasMoreVlq(reader, semi)) {
|
||||
const last = stack.pop();
|
||||
last[2] = genLine;
|
||||
last[3] = genColumn;
|
||||
continue;
|
||||
}
|
||||
const fields = decodeInteger(reader, 0);
|
||||
const hasDefinition = fields & 0b0001;
|
||||
const hasCallsite = fields & 0b0010;
|
||||
const hasScope = fields & 0b0100;
|
||||
let callsite = null;
|
||||
let bindings = EMPTY;
|
||||
let range;
|
||||
if (hasDefinition) {
|
||||
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
|
||||
definitionScopeIndex = decodeInteger(reader, definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0);
|
||||
definitionSourcesIndex = defSourcesIndex;
|
||||
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
|
||||
}
|
||||
else {
|
||||
range = [genLine, genColumn, 0, 0];
|
||||
}
|
||||
range.isScope = !!hasScope;
|
||||
if (hasCallsite) {
|
||||
const prevCsi = callsiteSourcesIndex;
|
||||
const prevLine = callsiteLine;
|
||||
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
|
||||
const sameSource = prevCsi === callsiteSourcesIndex;
|
||||
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
|
||||
callsiteColumn = decodeInteger(reader, sameSource && prevLine === callsiteLine ? callsiteColumn : 0);
|
||||
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
|
||||
}
|
||||
range.callsite = callsite;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
bindings = [];
|
||||
do {
|
||||
bindingLine = genLine;
|
||||
bindingColumn = genColumn;
|
||||
const expressionsCount = decodeInteger(reader, 0);
|
||||
let expressionRanges;
|
||||
if (expressionsCount < -1) {
|
||||
expressionRanges = [[decodeInteger(reader, 0)]];
|
||||
for (let i = -1; i > expressionsCount; i--) {
|
||||
const prevBl = bindingLine;
|
||||
bindingLine = decodeInteger(reader, bindingLine);
|
||||
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
|
||||
const expression = decodeInteger(reader, 0);
|
||||
expressionRanges.push([expression, bindingLine, bindingColumn]);
|
||||
}
|
||||
}
|
||||
else {
|
||||
expressionRanges = [[expressionsCount]];
|
||||
}
|
||||
bindings.push(expressionRanges);
|
||||
} while (hasMoreVlq(reader, semi));
|
||||
}
|
||||
range.bindings = bindings;
|
||||
ranges.push(range);
|
||||
stack.push(range);
|
||||
}
|
||||
genLine++;
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos < length);
|
||||
return ranges;
|
||||
}
|
||||
function encodeGeneratedRanges(ranges) {
|
||||
if (ranges.length === 0)
|
||||
return '';
|
||||
const writer = new StringWriter();
|
||||
for (let i = 0; i < ranges.length;) {
|
||||
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
function _encodeGeneratedRanges(ranges, index, writer, state) {
|
||||
const range = ranges[index];
|
||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, isScope, callsite, bindings, } = range;
|
||||
if (state[0] < startLine) {
|
||||
catchupLine(writer, state[0], startLine);
|
||||
state[0] = startLine;
|
||||
state[1] = 0;
|
||||
}
|
||||
else if (index > 0) {
|
||||
writer.write(comma);
|
||||
}
|
||||
state[1] = encodeInteger(writer, range[1], state[1]);
|
||||
const fields = (range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
|
||||
encodeInteger(writer, fields, 0);
|
||||
if (range.length === 6) {
|
||||
const { 4: sourcesIndex, 5: scopesIndex } = range;
|
||||
if (sourcesIndex !== state[2]) {
|
||||
state[3] = 0;
|
||||
}
|
||||
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
|
||||
state[3] = encodeInteger(writer, scopesIndex, state[3]);
|
||||
}
|
||||
if (callsite) {
|
||||
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
|
||||
if (sourcesIndex !== state[4]) {
|
||||
state[5] = 0;
|
||||
state[6] = 0;
|
||||
}
|
||||
else if (callLine !== state[5]) {
|
||||
state[6] = 0;
|
||||
}
|
||||
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
|
||||
state[5] = encodeInteger(writer, callLine, state[5]);
|
||||
state[6] = encodeInteger(writer, callColumn, state[6]);
|
||||
}
|
||||
if (bindings) {
|
||||
for (const binding of bindings) {
|
||||
if (binding.length > 1)
|
||||
encodeInteger(writer, -binding.length, 0);
|
||||
const expression = binding[0][0];
|
||||
encodeInteger(writer, expression, 0);
|
||||
let bindingStartLine = startLine;
|
||||
let bindingStartColumn = startColumn;
|
||||
for (let i = 1; i < binding.length; i++) {
|
||||
const expRange = binding[i];
|
||||
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
|
||||
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
|
||||
encodeInteger(writer, expRange[0], 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
for (index++; index < ranges.length;) {
|
||||
const next = ranges[index];
|
||||
const { 0: l, 1: c } = next;
|
||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
||||
break;
|
||||
}
|
||||
index = _encodeGeneratedRanges(ranges, index, writer, state);
|
||||
}
|
||||
if (state[0] < endLine) {
|
||||
catchupLine(writer, state[0], endLine);
|
||||
state[0] = endLine;
|
||||
state[1] = 0;
|
||||
}
|
||||
else {
|
||||
writer.write(comma);
|
||||
}
|
||||
state[1] = encodeInteger(writer, endColumn, state[1]);
|
||||
return index;
|
||||
}
|
||||
function catchupLine(writer, lastLine, line) {
|
||||
do {
|
||||
writer.write(semicolon);
|
||||
} while (++lastLine < line);
|
||||
}
|
||||
|
||||
function decode(mappings) {
|
||||
const { length } = mappings;
|
||||
const reader = new StringReader(mappings);
|
||||
const decoded = [];
|
||||
let genColumn = 0;
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
const line = [];
|
||||
let sorted = true;
|
||||
let lastCol = 0;
|
||||
genColumn = 0;
|
||||
while (reader.pos < semi) {
|
||||
let seg;
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (genColumn < lastCol)
|
||||
sorted = false;
|
||||
lastCol = genColumn;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
sourcesIndex = decodeInteger(reader, sourcesIndex);
|
||||
sourceLine = decodeInteger(reader, sourceLine);
|
||||
sourceColumn = decodeInteger(reader, sourceColumn);
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
namesIndex = decodeInteger(reader, namesIndex);
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
|
||||
}
|
||||
else {
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
|
||||
}
|
||||
}
|
||||
else {
|
||||
seg = [genColumn];
|
||||
}
|
||||
line.push(seg);
|
||||
reader.pos++;
|
||||
}
|
||||
if (!sorted)
|
||||
sort(line);
|
||||
decoded.push(line);
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos <= length);
|
||||
return decoded;
|
||||
}
|
||||
function sort(line) {
|
||||
line.sort(sortComparator);
|
||||
}
|
||||
function sortComparator(a, b) {
|
||||
return a[0] - b[0];
|
||||
}
|
||||
function encode(decoded) {
|
||||
const writer = new StringWriter();
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
for (let i = 0; i < decoded.length; i++) {
|
||||
const line = decoded[i];
|
||||
if (i > 0)
|
||||
writer.write(semicolon);
|
||||
if (line.length === 0)
|
||||
continue;
|
||||
let genColumn = 0;
|
||||
for (let j = 0; j < line.length; j++) {
|
||||
const segment = line[j];
|
||||
if (j > 0)
|
||||
writer.write(comma);
|
||||
genColumn = encodeInteger(writer, segment[0], genColumn);
|
||||
if (segment.length === 1)
|
||||
continue;
|
||||
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
|
||||
sourceLine = encodeInteger(writer, segment[2], sourceLine);
|
||||
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
|
||||
if (segment.length === 4)
|
||||
continue;
|
||||
namesIndex = encodeInteger(writer, segment[4], namesIndex);
|
||||
}
|
||||
}
|
||||
return writer.flush();
|
||||
}
|
||||
|
||||
exports.decode = decode;
|
||||
exports.decodeGeneratedRanges = decodeGeneratedRanges;
|
||||
exports.decodeOriginalScopes = decodeOriginalScopes;
|
||||
exports.encode = encode;
|
||||
exports.encodeGeneratedRanges = encodeGeneratedRanges;
|
||||
exports.encodeOriginalScopes = encodeOriginalScopes;
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
}));
|
||||
//# sourceMappingURL=sourcemap-codec.umd.js.map
|
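The UMD wrapper above also makes the codec loadable from CommonJS or as the `sourcemapCodec` global in a plain `<script>`. A minimal CommonJS sketch, assuming the package is installed:

```js
const codec = require('@jridgewell/sourcemap-codec')  // "require" resolves to dist/sourcemap-codec.umd.js

console.log(codec.decode('AACA'))  // [[[0, 0, 1, 0]]]
```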
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map (generated, vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
node_modules/@jridgewell/sourcemap-codec/dist/types/scopes.d.ts (generated, vendored, new file, 49 lines)
@@ -0,0 +1,49 @@
declare type Line = number;
declare type Column = number;
declare type Kind = number;
declare type Name = number;
declare type Var = number;
declare type SourcesIndex = number;
declare type ScopesIndex = number;
declare type Mix<A, B, O> = (A & O) | (B & O);
export declare type OriginalScope = Mix<[
    Line,
    Column,
    Line,
    Column,
    Kind
], [
    Line,
    Column,
    Line,
    Column,
    Kind,
    Name
], {
    vars: Var[];
}>;
export declare type GeneratedRange = Mix<[
    Line,
    Column,
    Line,
    Column
], [
    Line,
    Column,
    Line,
    Column,
    SourcesIndex,
    ScopesIndex
], {
    callsite: CallSite | null;
    bindings: Binding[];
    isScope: boolean;
}>;
export declare type CallSite = [SourcesIndex, Line, Column];
declare type Binding = BindingExpressionRange[];
export declare type BindingExpressionRange = [Name] | [Name, Line, Column];
export declare function decodeOriginalScopes(input: string): OriginalScope[];
export declare function encodeOriginalScopes(scopes: OriginalScope[]): string;
export declare function decodeGeneratedRanges(input: string): GeneratedRange[];
export declare function encodeGeneratedRanges(ranges: GeneratedRange[]): string;
export {};
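A sketch with hand-picked values (not from the commit) of the array-with-properties shape behind `OriginalScope`, fed through the encode/decode pair declared above and implemented in `sourcemap-codec.mjs` earlier in this diff.

```js
import { encodeOriginalScopes, decodeOriginalScopes } from '@jridgewell/sourcemap-codec'

// [startLine, startColumn, endLine, endColumn, kind, nameIndex] plus a `vars` property
const scope = [0, 0, 10, 4, 2, 1]
scope.vars = [0, 3]

const encoded = encodeOriginalScopes([scope])
console.log(decodeOriginalScopes(encoded))  // round-trips back to the same shape
```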
node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts (generated, vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
export { decodeOriginalScopes, encodeOriginalScopes, decodeGeneratedRanges, encodeGeneratedRanges, } from './scopes';
export type { OriginalScope, GeneratedRange, CallSite, BindingExpressionRange } from './scopes';
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
export declare type SourceMapLine = SourceMapSegment[];
export declare type SourceMapMappings = SourceMapLine[];
export declare function decode(mappings: string): SourceMapMappings;
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
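For reference, the three `SourceMapSegment` arities declared above correspond to the following (a sketch with made-up numbers, matching the layout described in the package README):

```js
const bare   = [12]              // [generatedColumn] — no source mapping
const mapped = [12, 0, 3, 4]     // [genColumn, sourcesIndex, sourceLine, sourceColumn]
const named  = [12, 0, 3, 4, 2]  // ...plus namesIndex into map.names
```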
node_modules/@jridgewell/sourcemap-codec/dist/types/strings.d.ts (generated, vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
export declare class StringWriter {
    pos: number;
    private out;
    private buffer;
    write(v: number): void;
    flush(): string;
}
export declare class StringReader {
    pos: number;
    private buffer;
    constructor(buffer: string);
    next(): number;
    peek(): number;
    indexOf(char: string): number;
}
node_modules/@jridgewell/sourcemap-codec/dist/types/vlq.d.ts (generated, vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
import type { StringReader, StringWriter } from './strings';
export declare const comma: number;
export declare const semicolon: number;
export declare function decodeInteger(reader: StringReader, relative: number): number;
export declare function encodeInteger(builder: StringWriter, num: number, relative: number): number;
export declare function hasMoreVlq(reader: StringReader, max: number): boolean;
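A standalone illustration (not the library's own code) of the base64 VLQ scheme behind `encodeInteger`/`decodeInteger` above: values are stored as deltas against the previous value, the sign moves into the lowest bit, and the result is emitted in 5-bit groups with a continuation bit.

```js
const CHARS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'

function vlqEncode (num, relative = 0) {
  let delta = num - relative
  delta = delta < 0 ? (-delta << 1) | 1 : delta << 1  // fold the sign into bit 0
  let out = ''
  do {
    let digit = delta & 0b11111
    delta >>>= 5
    if (delta > 0) digit |= 0b100000  // continuation bit: more groups follow
    out += CHARS[digit]
  } while (delta > 0)
  return out
}

console.log(vlqEncode(16))     // 'gB'
console.log(vlqEncode(26, 16)) // 'U' — encoded relative to the previous value 16
```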
node_modules/@jridgewell/sourcemap-codec/package.json (generated, vendored, new file, 75 lines)
@@ -0,0 +1,75 @@
{
  "name": "@jridgewell/sourcemap-codec",
  "version": "1.5.0",
  "description": "Encode/decode sourcemap mappings",
  "keywords": [
    "sourcemap",
    "vlq"
  ],
  "main": "dist/sourcemap-codec.umd.js",
  "module": "dist/sourcemap-codec.mjs",
  "types": "dist/types/sourcemap-codec.d.ts",
  "files": [
    "dist"
  ],
  "exports": {
    ".": [
      {
        "types": "./dist/types/sourcemap-codec.d.ts",
        "browser": "./dist/sourcemap-codec.umd.js",
        "require": "./dist/sourcemap-codec.umd.js",
        "import": "./dist/sourcemap-codec.mjs"
      },
      "./dist/sourcemap-codec.umd.js"
    ],
    "./package.json": "./package.json"
  },
  "scripts": {
    "benchmark": "run-s build:rollup benchmark:*",
    "benchmark:install": "cd benchmark && npm install",
    "benchmark:only": "node --expose-gc benchmark/index.js",
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "mocha --inspect-brk",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "mocha",
    "test:coverage": "c8 mocha",
    "test:watch": "mocha --watch"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/jridgewell/sourcemap-codec.git"
  },
  "author": "Rich Harris",
  "license": "MIT",
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.0",
    "@types/mocha": "10.0.6",
    "@types/node": "17.0.15",
    "@typescript-eslint/eslint-plugin": "5.10.0",
    "@typescript-eslint/parser": "5.10.0",
    "benchmark": "2.1.4",
    "c8": "7.11.2",
    "eslint": "8.7.0",
    "eslint-config-prettier": "8.3.0",
    "mocha": "9.2.0",
    "npm-run-all": "4.1.5",
    "prettier": "2.5.1",
    "rollup": "2.64.0",
    "source-map": "0.6.1",
    "source-map-js": "1.0.2",
    "sourcemap-codec": "1.4.8",
    "tsx": "4.7.1",
    "typescript": "4.5.4"
  }
}
21
node_modules/@nodelib/fs.scandir/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Denis Malinochkin
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
171
node_modules/@nodelib/fs.scandir/README.md
generated
vendored
Normal file
@ -0,0 +1,171 @@
|
||||
# @nodelib/fs.scandir
|
||||
|
||||
> List files and directories inside the specified directory.
|
||||
|
||||
## :bulb: Highlights
|
||||
|
||||
The package is aimed at obtaining information about entries in the directory.
|
||||
|
||||
* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional).
|
||||
* :gear: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type. See [`old` and `modern` mode](#old-and-modern-mode).
|
||||
* :link: Can safely work with broken symbolic links.
|
||||
|
||||
## Install
|
||||
|
||||
```console
|
||||
npm install @nodelib/fs.scandir
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```ts
|
||||
import * as fsScandir from '@nodelib/fs.scandir';
|
||||
|
||||
fsScandir.scandir('path', (error, stats) => { /* … */ });
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### .scandir(path, [optionsOrSettings], callback)
|
||||
|
||||
Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path with standard callback-style.
|
||||
|
||||
```ts
|
||||
fsScandir.scandir('path', (error, entries) => { /* … */ });
|
||||
fsScandir.scandir('path', {}, (error, entries) => { /* … */ });
|
||||
fsScandir.scandir('path', new fsScandir.Settings(), (error, entries) => { /* … */ });
|
||||
```
|
||||
|
||||
### .scandirSync(path, [optionsOrSettings])
|
||||
|
||||
Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path.
|
||||
|
||||
```ts
|
||||
const entries = fsScandir.scandirSync('path');
|
||||
const entries = fsScandir.scandirSync('path', {});
|
||||
const entries = fsScandir.scandirSync('path', new fsScandir.Settings());
|
||||
```
|
||||
|
||||
#### path
|
||||
|
||||
* Required: `true`
|
||||
* Type: `string | Buffer | URL`
|
||||
|
||||
A path to a file. If a URL is provided, it must use the `file:` protocol.
|
||||
|
||||
#### optionsOrSettings
|
||||
|
||||
* Required: `false`
|
||||
* Type: `Options | Settings`
|
||||
* Default: An instance of `Settings` class
|
||||
|
||||
An [`Options`](#options) object or an instance of [`Settings`](#settingsoptions) class.
|
||||
|
||||
> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class.
|
||||
|
||||
### Settings([options])
|
||||
|
||||
A class of full settings of the package.
|
||||
|
||||
```ts
|
||||
const settings = new fsScandir.Settings({ followSymbolicLinks: false });
|
||||
|
||||
const entries = fsScandir.scandirSync('path', settings);
|
||||
```
|
||||
|
||||
## Entry
|
||||
|
||||
* `name` — The name of the entry (`unknown.txt`).
|
||||
* `path` — The path of the entry relative to call directory (`root/unknown.txt`).
|
||||
* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. On Node.js below 10.10 will be emulated by [`DirentFromStats`](./src/utils/fs.ts) class.
|
||||
* `stats` (optional) — An instance of `fs.Stats` class.
|
||||
|
||||
For example, the `scandir` call for `tools` directory with one directory inside:
|
||||
|
||||
```ts
|
||||
{
|
||||
dirent: Dirent { name: 'typedoc', /* … */ },
|
||||
name: 'typedoc',
|
||||
path: 'tools/typedoc'
|
||||
}
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
### stats
|
||||
|
||||
* Type: `boolean`
|
||||
* Default: `false`
|
||||
|
||||
Adds an instance of `fs.Stats` class to the [`Entry`](#entry).
|
||||
|
||||
> :book: Always use `fs.readdir` without the `withFileTypes` option. ??TODO??
|
||||
|
||||
### followSymbolicLinks
|
||||
|
||||
* Type: `boolean`
|
||||
* Default: `false`
|
||||
|
||||
Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`.
|
||||
|
||||
### `throwErrorOnBrokenSymbolicLink`
|
||||
|
||||
* Type: `boolean`
|
||||
* Default: `true`
|
||||
|
||||
Throw an error when symbolic link is broken if `true` or safely use `lstat` call if `false`.
|
||||
|
||||
### `pathSegmentSeparator`
|
||||
|
||||
* Type: `string`
|
||||
* Default: `path.sep`
|
||||
|
||||
By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead.
|
||||
|
||||
### `fs`
|
||||
|
||||
* Type: [`FileSystemAdapter`](./src/adapters/fs.ts)
|
||||
* Default: A default FS methods
|
||||
|
||||
By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own.
|
||||
|
||||
```ts
|
||||
interface FileSystemAdapter {
|
||||
lstat?: typeof fs.lstat;
|
||||
stat?: typeof fs.stat;
|
||||
lstatSync?: typeof fs.lstatSync;
|
||||
statSync?: typeof fs.statSync;
|
||||
readdir?: typeof fs.readdir;
|
||||
readdirSync?: typeof fs.readdirSync;
|
||||
}
|
||||
|
||||
const settings = new fsScandir.Settings({
|
||||
fs: { lstat: fakeLstat }
|
||||
});
|
||||
```
|
||||
|
||||
## `old` and `modern` mode
|
||||
|
||||
This package has two modes that are used depending on the environment and parameters of use.
|
||||
|
||||
### old
|
||||
|
||||
* Node.js below `10.10` or when the `stats` option is enabled
|
||||
|
||||
When working in the old mode, the directory is read first (`fs.readdir`), then the type of entries is determined (`fs.lstat` and/or `fs.stat` for symbolic links).
|
||||
|
||||
### modern
|
||||
|
||||
* Node.js 10.10+ and the `stats` option is disabled
|
||||
|
||||
In the modern mode, reading the directory (`fs.readdir` with the `withFileTypes` option) is combined with obtaining information about its entries. An additional call for symbolic links (`fs.stat`) is still present.
|
||||
|
||||
This mode makes fewer calls to the file system. It's faster.
|
||||
|
||||
## Changelog
|
||||
|
||||
See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version.
|
||||
|
||||
## License
|
||||
|
||||
This software is released under the terms of the MIT license.
|
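To tie the options above together, a minimal sketch of calling `scandir` with a pre-created `Settings` instance, as the README recommends for frequent calls; the `./src` directory is only a placeholder:

```ts
import * as fsScandir from '@nodelib/fs.scandir';

// Reuse one Settings instance across calls; `stats: true` enables the `old` mode
// and attaches an `fs.Stats` instance to every entry.
const settings = new fsScandir.Settings({
    stats: true,
    followSymbolicLinks: true,
    throwErrorOnBrokenSymbolicLink: false
});

fsScandir.scandir('./src', settings, (error, entries) => {
    if (error !== null) {
        console.error(error);
        return;
    }
    for (const entry of entries) {
        console.log(entry.path, entry.dirent.isDirectory(), entry.stats?.size);
    }
});
```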
20
node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
import type * as fsStat from '@nodelib/fs.stat';
|
||||
import type { Dirent, ErrnoException } from '../types';
|
||||
export interface ReaddirAsynchronousMethod {
|
||||
(filepath: string, options: {
|
||||
withFileTypes: true;
|
||||
}, callback: (error: ErrnoException | null, files: Dirent[]) => void): void;
|
||||
(filepath: string, callback: (error: ErrnoException | null, files: string[]) => void): void;
|
||||
}
|
||||
export interface ReaddirSynchronousMethod {
|
||||
(filepath: string, options: {
|
||||
withFileTypes: true;
|
||||
}): Dirent[];
|
||||
(filepath: string): string[];
|
||||
}
|
||||
export declare type FileSystemAdapter = fsStat.FileSystemAdapter & {
|
||||
readdir: ReaddirAsynchronousMethod;
|
||||
readdirSync: ReaddirSynchronousMethod;
|
||||
};
|
||||
export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter;
|
||||
export declare function createFileSystemAdapter(fsMethods?: Partial<FileSystemAdapter>): FileSystemAdapter;
|
19
node_modules/@nodelib/fs.scandir/out/adapters/fs.js
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
|
||||
const fs = require("fs");
|
||||
exports.FILE_SYSTEM_ADAPTER = {
|
||||
lstat: fs.lstat,
|
||||
stat: fs.stat,
|
||||
lstatSync: fs.lstatSync,
|
||||
statSync: fs.statSync,
|
||||
readdir: fs.readdir,
|
||||
readdirSync: fs.readdirSync
|
||||
};
|
||||
function createFileSystemAdapter(fsMethods) {
|
||||
if (fsMethods === undefined) {
|
||||
return exports.FILE_SYSTEM_ADAPTER;
|
||||
}
|
||||
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
|
||||
}
|
||||
exports.createFileSystemAdapter = createFileSystemAdapter;
|
4
node_modules/@nodelib/fs.scandir/out/constants.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
/**
|
||||
* IS `true` for Node.js 10.10 and greater.
|
||||
*/
|
||||
export declare const IS_SUPPORT_READDIR_WITH_FILE_TYPES: boolean;
|
17
node_modules/@nodelib/fs.scandir/out/constants.js
generated
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0;
|
||||
const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');
|
||||
if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) {
|
||||
throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`);
|
||||
}
|
||||
const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);
|
||||
const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);
|
||||
const SUPPORTED_MAJOR_VERSION = 10;
|
||||
const SUPPORTED_MINOR_VERSION = 10;
|
||||
const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION;
|
||||
const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION;
|
||||
/**
|
||||
* IS `true` for Node.js 10.10 and greater.
|
||||
*/
|
||||
exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR;
|
12
node_modules/@nodelib/fs.scandir/out/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
import type { FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod } from './adapters/fs';
|
||||
import * as async from './providers/async';
|
||||
import Settings, { Options } from './settings';
|
||||
import type { Dirent, Entry } from './types';
|
||||
declare type AsyncCallback = async.AsyncCallback;
|
||||
declare function scandir(path: string, callback: AsyncCallback): void;
|
||||
declare function scandir(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void;
|
||||
declare namespace scandir {
|
||||
function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise<Entry[]>;
|
||||
}
|
||||
declare function scandirSync(path: string, optionsOrSettings?: Options | Settings): Entry[];
|
||||
export { scandir, scandirSync, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod, Options };
|
26
node_modules/@nodelib/fs.scandir/out/index.js
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Settings = exports.scandirSync = exports.scandir = void 0;
|
||||
const async = require("./providers/async");
|
||||
const sync = require("./providers/sync");
|
||||
const settings_1 = require("./settings");
|
||||
exports.Settings = settings_1.default;
|
||||
function scandir(path, optionsOrSettingsOrCallback, callback) {
|
||||
if (typeof optionsOrSettingsOrCallback === 'function') {
|
||||
async.read(path, getSettings(), optionsOrSettingsOrCallback);
|
||||
return;
|
||||
}
|
||||
async.read(path, getSettings(optionsOrSettingsOrCallback), callback);
|
||||
}
|
||||
exports.scandir = scandir;
|
||||
function scandirSync(path, optionsOrSettings) {
|
||||
const settings = getSettings(optionsOrSettings);
|
||||
return sync.read(path, settings);
|
||||
}
|
||||
exports.scandirSync = scandirSync;
|
||||
function getSettings(settingsOrOptions = {}) {
|
||||
if (settingsOrOptions instanceof settings_1.default) {
|
||||
return settingsOrOptions;
|
||||
}
|
||||
return new settings_1.default(settingsOrOptions);
|
||||
}
|
7
node_modules/@nodelib/fs.scandir/out/providers/async.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
/// <reference types="node" />
|
||||
import type Settings from '../settings';
|
||||
import type { Entry } from '../types';
|
||||
export declare type AsyncCallback = (error: NodeJS.ErrnoException, entries: Entry[]) => void;
|
||||
export declare function read(directory: string, settings: Settings, callback: AsyncCallback): void;
|
||||
export declare function readdirWithFileTypes(directory: string, settings: Settings, callback: AsyncCallback): void;
|
||||
export declare function readdir(directory: string, settings: Settings, callback: AsyncCallback): void;
|
104
node_modules/@nodelib/fs.scandir/out/providers/async.js
generated
vendored
Normal file
@ -0,0 +1,104 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.readdir = exports.readdirWithFileTypes = exports.read = void 0;
|
||||
const fsStat = require("@nodelib/fs.stat");
|
||||
const rpl = require("run-parallel");
|
||||
const constants_1 = require("../constants");
|
||||
const utils = require("../utils");
|
||||
const common = require("./common");
|
||||
function read(directory, settings, callback) {
|
||||
if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
|
||||
readdirWithFileTypes(directory, settings, callback);
|
||||
return;
|
||||
}
|
||||
readdir(directory, settings, callback);
|
||||
}
|
||||
exports.read = read;
|
||||
function readdirWithFileTypes(directory, settings, callback) {
|
||||
settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => {
|
||||
if (readdirError !== null) {
|
||||
callFailureCallback(callback, readdirError);
|
||||
return;
|
||||
}
|
||||
const entries = dirents.map((dirent) => ({
|
||||
dirent,
|
||||
name: dirent.name,
|
||||
path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
|
||||
}));
|
||||
if (!settings.followSymbolicLinks) {
|
||||
callSuccessCallback(callback, entries);
|
||||
return;
|
||||
}
|
||||
const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings));
|
||||
rpl(tasks, (rplError, rplEntries) => {
|
||||
if (rplError !== null) {
|
||||
callFailureCallback(callback, rplError);
|
||||
return;
|
||||
}
|
||||
callSuccessCallback(callback, rplEntries);
|
||||
});
|
||||
});
|
||||
}
|
||||
exports.readdirWithFileTypes = readdirWithFileTypes;
|
||||
function makeRplTaskEntry(entry, settings) {
|
||||
return (done) => {
|
||||
if (!entry.dirent.isSymbolicLink()) {
|
||||
done(null, entry);
|
||||
return;
|
||||
}
|
||||
settings.fs.stat(entry.path, (statError, stats) => {
|
||||
if (statError !== null) {
|
||||
if (settings.throwErrorOnBrokenSymbolicLink) {
|
||||
done(statError);
|
||||
return;
|
||||
}
|
||||
done(null, entry);
|
||||
return;
|
||||
}
|
||||
entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);
|
||||
done(null, entry);
|
||||
});
|
||||
};
|
||||
}
|
||||
function readdir(directory, settings, callback) {
|
||||
settings.fs.readdir(directory, (readdirError, names) => {
|
||||
if (readdirError !== null) {
|
||||
callFailureCallback(callback, readdirError);
|
||||
return;
|
||||
}
|
||||
const tasks = names.map((name) => {
|
||||
const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator);
|
||||
return (done) => {
|
||||
fsStat.stat(path, settings.fsStatSettings, (error, stats) => {
|
||||
if (error !== null) {
|
||||
done(error);
|
||||
return;
|
||||
}
|
||||
const entry = {
|
||||
name,
|
||||
path,
|
||||
dirent: utils.fs.createDirentFromStats(name, stats)
|
||||
};
|
||||
if (settings.stats) {
|
||||
entry.stats = stats;
|
||||
}
|
||||
done(null, entry);
|
||||
});
|
||||
};
|
||||
});
|
||||
rpl(tasks, (rplError, entries) => {
|
||||
if (rplError !== null) {
|
||||
callFailureCallback(callback, rplError);
|
||||
return;
|
||||
}
|
||||
callSuccessCallback(callback, entries);
|
||||
});
|
||||
});
|
||||
}
|
||||
exports.readdir = readdir;
|
||||
function callFailureCallback(callback, error) {
|
||||
callback(error);
|
||||
}
|
||||
function callSuccessCallback(callback, result) {
|
||||
callback(null, result);
|
||||
}
|
1
node_modules/@nodelib/fs.scandir/out/providers/common.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
export declare function joinPathSegments(a: string, b: string, separator: string): string;
|
13
node_modules/@nodelib/fs.scandir/out/providers/common.js
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.joinPathSegments = void 0;
|
||||
function joinPathSegments(a, b, separator) {
|
||||
/**
|
||||
* The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`).
|
||||
*/
|
||||
if (a.endsWith(separator)) {
|
||||
return a + b;
|
||||
}
|
||||
return a + separator + b;
|
||||
}
|
||||
exports.joinPathSegments = joinPathSegments;
|
5
node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts
generated
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
import type Settings from '../settings';
|
||||
import type { Entry } from '../types';
|
||||
export declare function read(directory: string, settings: Settings): Entry[];
|
||||
export declare function readdirWithFileTypes(directory: string, settings: Settings): Entry[];
|
||||
export declare function readdir(directory: string, settings: Settings): Entry[];
|
54
node_modules/@nodelib/fs.scandir/out/providers/sync.js
generated
vendored
Normal file
@ -0,0 +1,54 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.readdir = exports.readdirWithFileTypes = exports.read = void 0;
|
||||
const fsStat = require("@nodelib/fs.stat");
|
||||
const constants_1 = require("../constants");
|
||||
const utils = require("../utils");
|
||||
const common = require("./common");
|
||||
function read(directory, settings) {
|
||||
if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
|
||||
return readdirWithFileTypes(directory, settings);
|
||||
}
|
||||
return readdir(directory, settings);
|
||||
}
|
||||
exports.read = read;
|
||||
function readdirWithFileTypes(directory, settings) {
|
||||
const dirents = settings.fs.readdirSync(directory, { withFileTypes: true });
|
||||
return dirents.map((dirent) => {
|
||||
const entry = {
|
||||
dirent,
|
||||
name: dirent.name,
|
||||
path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
|
||||
};
|
||||
if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) {
|
||||
try {
|
||||
const stats = settings.fs.statSync(entry.path);
|
||||
entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);
|
||||
}
|
||||
catch (error) {
|
||||
if (settings.throwErrorOnBrokenSymbolicLink) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
return entry;
|
||||
});
|
||||
}
|
||||
exports.readdirWithFileTypes = readdirWithFileTypes;
|
||||
function readdir(directory, settings) {
|
||||
const names = settings.fs.readdirSync(directory);
|
||||
return names.map((name) => {
|
||||
const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator);
|
||||
const stats = fsStat.statSync(entryPath, settings.fsStatSettings);
|
||||
const entry = {
|
||||
name,
|
||||
path: entryPath,
|
||||
dirent: utils.fs.createDirentFromStats(name, stats)
|
||||
};
|
||||
if (settings.stats) {
|
||||
entry.stats = stats;
|
||||
}
|
||||
return entry;
|
||||
});
|
||||
}
|
||||
exports.readdir = readdir;
|
20
node_modules/@nodelib/fs.scandir/out/settings.d.ts
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
import * as fsStat from '@nodelib/fs.stat';
|
||||
import * as fs from './adapters/fs';
|
||||
export interface Options {
|
||||
followSymbolicLinks?: boolean;
|
||||
fs?: Partial<fs.FileSystemAdapter>;
|
||||
pathSegmentSeparator?: string;
|
||||
stats?: boolean;
|
||||
throwErrorOnBrokenSymbolicLink?: boolean;
|
||||
}
|
||||
export default class Settings {
|
||||
private readonly _options;
|
||||
readonly followSymbolicLinks: boolean;
|
||||
readonly fs: fs.FileSystemAdapter;
|
||||
readonly pathSegmentSeparator: string;
|
||||
readonly stats: boolean;
|
||||
readonly throwErrorOnBrokenSymbolicLink: boolean;
|
||||
readonly fsStatSettings: fsStat.Settings;
|
||||
constructor(_options?: Options);
|
||||
private _getValue;
|
||||
}
|
24
node_modules/@nodelib/fs.scandir/out/settings.js
generated
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path = require("path");
|
||||
const fsStat = require("@nodelib/fs.stat");
|
||||
const fs = require("./adapters/fs");
|
||||
class Settings {
|
||||
constructor(_options = {}) {
|
||||
this._options = _options;
|
||||
this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);
|
||||
this.fs = fs.createFileSystemAdapter(this._options.fs);
|
||||
this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);
|
||||
this.stats = this._getValue(this._options.stats, false);
|
||||
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
|
||||
this.fsStatSettings = new fsStat.Settings({
|
||||
followSymbolicLink: this.followSymbolicLinks,
|
||||
fs: this.fs,
|
||||
throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink
|
||||
});
|
||||
}
|
||||
_getValue(option, value) {
|
||||
return option !== null && option !== void 0 ? option : value;
|
||||
}
|
||||
}
|
||||
exports.default = Settings;
|
20
node_modules/@nodelib/fs.scandir/out/types/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
/// <reference types="node" />
|
||||
import type * as fs from 'fs';
|
||||
export interface Entry {
|
||||
dirent: Dirent;
|
||||
name: string;
|
||||
path: string;
|
||||
stats?: Stats;
|
||||
}
|
||||
export declare type Stats = fs.Stats;
|
||||
export declare type ErrnoException = NodeJS.ErrnoException;
|
||||
export interface Dirent {
|
||||
isBlockDevice: () => boolean;
|
||||
isCharacterDevice: () => boolean;
|
||||
isDirectory: () => boolean;
|
||||
isFIFO: () => boolean;
|
||||
isFile: () => boolean;
|
||||
isSocket: () => boolean;
|
||||
isSymbolicLink: () => boolean;
|
||||
name: string;
|
||||
}
|
2
node_modules/@nodelib/fs.scandir/out/types/index.js
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
2
node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import type { Dirent, Stats } from '../types';
|
||||
export declare function createDirentFromStats(name: string, stats: Stats): Dirent;
|
19
node_modules/@nodelib/fs.scandir/out/utils/fs.js
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createDirentFromStats = void 0;
|
||||
class DirentFromStats {
|
||||
constructor(name, stats) {
|
||||
this.name = name;
|
||||
this.isBlockDevice = stats.isBlockDevice.bind(stats);
|
||||
this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
|
||||
this.isDirectory = stats.isDirectory.bind(stats);
|
||||
this.isFIFO = stats.isFIFO.bind(stats);
|
||||
this.isFile = stats.isFile.bind(stats);
|
||||
this.isSocket = stats.isSocket.bind(stats);
|
||||
this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
|
||||
}
|
||||
}
|
||||
function createDirentFromStats(name, stats) {
|
||||
return new DirentFromStats(name, stats);
|
||||
}
|
||||
exports.createDirentFromStats = createDirentFromStats;
|
2
node_modules/@nodelib/fs.scandir/out/utils/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
import * as fs from './fs';
|
||||
export { fs };
|
5
node_modules/@nodelib/fs.scandir/out/utils/index.js
generated
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.fs = void 0;
|
||||
const fs = require("./fs");
|
||||
exports.fs = fs;
|
44
node_modules/@nodelib/fs.scandir/package.json
generated
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
{
|
||||
"name": "@nodelib/fs.scandir",
|
||||
"version": "2.1.5",
|
||||
"description": "List files and directories inside the specified directory",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.scandir",
|
||||
"keywords": [
|
||||
"NodeLib",
|
||||
"fs",
|
||||
"FileSystem",
|
||||
"file system",
|
||||
"scandir",
|
||||
"readdir",
|
||||
"dirent"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
},
|
||||
"files": [
|
||||
"out/**",
|
||||
"!out/**/*.map",
|
||||
"!out/**/*.spec.*"
|
||||
],
|
||||
"main": "out/index.js",
|
||||
"typings": "out/index.d.ts",
|
||||
"scripts": {
|
||||
"clean": "rimraf {tsconfig.tsbuildinfo,out}",
|
||||
"lint": "eslint \"src/**/*.ts\" --cache",
|
||||
"compile": "tsc -b .",
|
||||
"compile:watch": "tsc -p . --watch --sourceMap",
|
||||
"test": "mocha \"out/**/*.spec.js\" -s 0",
|
||||
"build": "npm run clean && npm run compile && npm run lint && npm test",
|
||||
"watch": "npm run clean && npm run compile:watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@nodelib/fs.stat": "2.0.5",
|
||||
"run-parallel": "^1.1.9"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@nodelib/fs.macchiato": "1.0.4",
|
||||
"@types/run-parallel": "^1.1.0"
|
||||
},
|
||||
"gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562"
|
||||
}
|
21
node_modules/@nodelib/fs.stat/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Denis Malinochkin
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
126
node_modules/@nodelib/fs.stat/README.md
generated
vendored
Normal file
@ -0,0 +1,126 @@
|
||||
# @nodelib/fs.stat
|
||||
|
||||
> Get the status of a file with some features.
|
||||
|
||||
## :bulb: Highlights
|
||||
|
||||
Wrapper around standard method `fs.lstat` and `fs.stat` with some features.
|
||||
|
||||
* :beginner: Normally follows symbolic link.
|
||||
* :gear: Can safely work with broken symbolic link.
|
||||
|
||||
## Install
|
||||
|
||||
```console
|
||||
npm install @nodelib/fs.stat
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```ts
|
||||
import * as fsStat from '@nodelib/fs.stat';
|
||||
|
||||
fsStat.stat('path', (error, stats) => { /* … */ });
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### .stat(path, [optionsOrSettings], callback)
|
||||
|
||||
Returns an instance of `fs.Stats` class for provided path with standard callback-style.
|
||||
|
||||
```ts
|
||||
fsStat.stat('path', (error, stats) => { /* … */ });
|
||||
fsStat.stat('path', {}, (error, stats) => { /* … */ });
|
||||
fsStat.stat('path', new fsStat.Settings(), (error, stats) => { /* … */ });
|
||||
```
|
||||
|
||||
### .statSync(path, [optionsOrSettings])
|
||||
|
||||
Returns an instance of `fs.Stats` class for provided path.
|
||||
|
||||
```ts
|
||||
const stats = fsStat.statSync('path');
const stats = fsStat.statSync('path', {});
const stats = fsStat.statSync('path', new fsStat.Settings());
|
||||
```
|
||||
|
||||
#### path
|
||||
|
||||
* Required: `true`
|
||||
* Type: `string | Buffer | URL`
|
||||
|
||||
A path to a file. If a URL is provided, it must use the `file:` protocol.
|
||||
|
||||
#### optionsOrSettings
|
||||
|
||||
* Required: `false`
|
||||
* Type: `Options | Settings`
|
||||
* Default: An instance of `Settings` class
|
||||
|
||||
An [`Options`](#options) object or an instance of [`Settings`](#settings) class.
|
||||
|
||||
> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class.
|
||||
|
||||
### Settings([options])
|
||||
|
||||
A class of full settings of the package.
|
||||
|
||||
```ts
|
||||
const settings = new fsStat.Settings({ followSymbolicLink: false });
|
||||
|
||||
const stats = fsStat.stat('path', settings);
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
### `followSymbolicLink`
|
||||
|
||||
* Type: `boolean`
|
||||
* Default: `true`
|
||||
|
||||
Follow symbolic link or not. Call `fs.stat` on symbolic link if `true`.
|
||||
|
||||
### `markSymbolicLink`
|
||||
|
||||
* Type: `boolean`
|
||||
* Default: `false`
|
||||
|
||||
Mark symbolic link by setting the return value of `isSymbolicLink` function to always `true` (even after `fs.stat`).
|
||||
|
||||
> :book: Can be used if you want to know what is hidden behind a symbolic link, but still continue to know that it is a symbolic link.
|
||||
|
||||
### `throwErrorOnBrokenSymbolicLink`
|
||||
|
||||
* Type: `boolean`
|
||||
* Default: `true`
|
||||
|
||||
Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`.
|
||||
|
||||
### `fs`
|
||||
|
||||
* Type: [`FileSystemAdapter`](./src/adapters/fs.ts)
|
||||
* Default: A default FS methods
|
||||
|
||||
By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own.
|
||||
|
||||
```ts
|
||||
interface FileSystemAdapter {
|
||||
lstat?: typeof fs.lstat;
|
||||
stat?: typeof fs.stat;
|
||||
lstatSync?: typeof fs.lstatSync;
|
||||
statSync?: typeof fs.statSync;
|
||||
}
|
||||
|
||||
const settings = new fsStat.Settings({
|
||||
fs: { lstat: fakeLstat }
|
||||
});
|
||||
```
|
||||
|
||||
## Changelog
|
||||
|
||||
See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version.
|
||||
|
||||
## License
|
||||
|
||||
This software is released under the terms of the MIT license.
|
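Analogously, a small sketch of the `@nodelib/fs.stat` API described above, using `markSymbolicLink` so callers can still detect links after they have been followed; the paths are placeholders:

```ts
import * as fsStat from '@nodelib/fs.stat';

const settings = new fsStat.Settings({
    markSymbolicLink: true,
    throwErrorOnBrokenSymbolicLink: false
});

// Asynchronous variant: follows the link (fs.stat) but keeps isSymbolicLink() === true.
fsStat.stat('./link-to-file', settings, (error, stats) => {
    if (error !== null) {
        console.error(error);
        return;
    }
    console.log(stats.size, stats.isSymbolicLink());
});

// Synchronous variant with the same settings.
const stats = fsStat.statSync('./link-to-file', settings);
console.log(stats.isFile());
```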
13
node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
/// <reference types="node" />
|
||||
import * as fs from 'fs';
|
||||
import type { ErrnoException } from '../types';
|
||||
export declare type StatAsynchronousMethod = (path: string, callback: (error: ErrnoException | null, stats: fs.Stats) => void) => void;
|
||||
export declare type StatSynchronousMethod = (path: string) => fs.Stats;
|
||||
export interface FileSystemAdapter {
|
||||
lstat: StatAsynchronousMethod;
|
||||
stat: StatAsynchronousMethod;
|
||||
lstatSync: StatSynchronousMethod;
|
||||
statSync: StatSynchronousMethod;
|
||||
}
|
||||
export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter;
|
||||
export declare function createFileSystemAdapter(fsMethods?: Partial<FileSystemAdapter>): FileSystemAdapter;
|
17
node_modules/@nodelib/fs.stat/out/adapters/fs.js
generated
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
|
||||
const fs = require("fs");
|
||||
exports.FILE_SYSTEM_ADAPTER = {
|
||||
lstat: fs.lstat,
|
||||
stat: fs.stat,
|
||||
lstatSync: fs.lstatSync,
|
||||
statSync: fs.statSync
|
||||
};
|
||||
function createFileSystemAdapter(fsMethods) {
|
||||
if (fsMethods === undefined) {
|
||||
return exports.FILE_SYSTEM_ADAPTER;
|
||||
}
|
||||
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
|
||||
}
|
||||
exports.createFileSystemAdapter = createFileSystemAdapter;
|
12
node_modules/@nodelib/fs.stat/out/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
import type { FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod } from './adapters/fs';
|
||||
import * as async from './providers/async';
|
||||
import Settings, { Options } from './settings';
|
||||
import type { Stats } from './types';
|
||||
declare type AsyncCallback = async.AsyncCallback;
|
||||
declare function stat(path: string, callback: AsyncCallback): void;
|
||||
declare function stat(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void;
|
||||
declare namespace stat {
|
||||
function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise<Stats>;
|
||||
}
|
||||
declare function statSync(path: string, optionsOrSettings?: Options | Settings): Stats;
|
||||
export { Settings, stat, statSync, AsyncCallback, FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod, Options, Stats };
|
26
node_modules/@nodelib/fs.stat/out/index.js
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.statSync = exports.stat = exports.Settings = void 0;
|
||||
const async = require("./providers/async");
|
||||
const sync = require("./providers/sync");
|
||||
const settings_1 = require("./settings");
|
||||
exports.Settings = settings_1.default;
|
||||
function stat(path, optionsOrSettingsOrCallback, callback) {
|
||||
if (typeof optionsOrSettingsOrCallback === 'function') {
|
||||
async.read(path, getSettings(), optionsOrSettingsOrCallback);
|
||||
return;
|
||||
}
|
||||
async.read(path, getSettings(optionsOrSettingsOrCallback), callback);
|
||||
}
|
||||
exports.stat = stat;
|
||||
function statSync(path, optionsOrSettings) {
|
||||
const settings = getSettings(optionsOrSettings);
|
||||
return sync.read(path, settings);
|
||||
}
|
||||
exports.statSync = statSync;
|
||||
function getSettings(settingsOrOptions = {}) {
|
||||
if (settingsOrOptions instanceof settings_1.default) {
|
||||
return settingsOrOptions;
|
||||
}
|
||||
return new settings_1.default(settingsOrOptions);
|
||||
}
|
4
node_modules/@nodelib/fs.stat/out/providers/async.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
import type Settings from '../settings';
|
||||
import type { ErrnoException, Stats } from '../types';
|
||||
export declare type AsyncCallback = (error: ErrnoException, stats: Stats) => void;
|
||||
export declare function read(path: string, settings: Settings, callback: AsyncCallback): void;
|
36
node_modules/@nodelib/fs.stat/out/providers/async.js
generated
vendored
Normal file
@ -0,0 +1,36 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.read = void 0;
|
||||
function read(path, settings, callback) {
|
||||
settings.fs.lstat(path, (lstatError, lstat) => {
|
||||
if (lstatError !== null) {
|
||||
callFailureCallback(callback, lstatError);
|
||||
return;
|
||||
}
|
||||
if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {
|
||||
callSuccessCallback(callback, lstat);
|
||||
return;
|
||||
}
|
||||
settings.fs.stat(path, (statError, stat) => {
|
||||
if (statError !== null) {
|
||||
if (settings.throwErrorOnBrokenSymbolicLink) {
|
||||
callFailureCallback(callback, statError);
|
||||
return;
|
||||
}
|
||||
callSuccessCallback(callback, lstat);
|
||||
return;
|
||||
}
|
||||
if (settings.markSymbolicLink) {
|
||||
stat.isSymbolicLink = () => true;
|
||||
}
|
||||
callSuccessCallback(callback, stat);
|
||||
});
|
||||
});
|
||||
}
|
||||
exports.read = read;
|
||||
function callFailureCallback(callback, error) {
|
||||
callback(error);
|
||||
}
|
||||
function callSuccessCallback(callback, result) {
|
||||
callback(null, result);
|
||||
}
|
3
node_modules/@nodelib/fs.stat/out/providers/sync.d.ts
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
import type Settings from '../settings';
|
||||
import type { Stats } from '../types';
|
||||
export declare function read(path: string, settings: Settings): Stats;
|
23
node_modules/@nodelib/fs.stat/out/providers/sync.js
generated
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.read = void 0;
|
||||
function read(path, settings) {
|
||||
const lstat = settings.fs.lstatSync(path);
|
||||
if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) {
|
||||
return lstat;
|
||||
}
|
||||
try {
|
||||
const stat = settings.fs.statSync(path);
|
||||
if (settings.markSymbolicLink) {
|
||||
stat.isSymbolicLink = () => true;
|
||||
}
|
||||
return stat;
|
||||
}
|
||||
catch (error) {
|
||||
if (!settings.throwErrorOnBrokenSymbolicLink) {
|
||||
return lstat;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
exports.read = read;
|
16
node_modules/@nodelib/fs.stat/out/settings.d.ts
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
import * as fs from './adapters/fs';
|
||||
export interface Options {
|
||||
followSymbolicLink?: boolean;
|
||||
fs?: Partial<fs.FileSystemAdapter>;
|
||||
markSymbolicLink?: boolean;
|
||||
throwErrorOnBrokenSymbolicLink?: boolean;
|
||||
}
|
||||
export default class Settings {
|
||||
private readonly _options;
|
||||
readonly followSymbolicLink: boolean;
|
||||
readonly fs: fs.FileSystemAdapter;
|
||||
readonly markSymbolicLink: boolean;
|
||||
readonly throwErrorOnBrokenSymbolicLink: boolean;
|
||||
constructor(_options?: Options);
|
||||
private _getValue;
|
||||
}
|
16
node_modules/@nodelib/fs.stat/out/settings.js
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = require("./adapters/fs");
|
||||
class Settings {
|
||||
constructor(_options = {}) {
|
||||
this._options = _options;
|
||||
this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true);
|
||||
this.fs = fs.createFileSystemAdapter(this._options.fs);
|
||||
this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false);
|
||||
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
|
||||
}
|
||||
_getValue(option, value) {
|
||||
return option !== null && option !== void 0 ? option : value;
|
||||
}
|
||||
}
|
||||
exports.default = Settings;
|
4
node_modules/@nodelib/fs.stat/out/types/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
/// <reference types="node" />
|
||||
import type * as fs from 'fs';
|
||||
export declare type Stats = fs.Stats;
|
||||
export declare type ErrnoException = NodeJS.ErrnoException;
|
2
node_modules/@nodelib/fs.stat/out/types/index.js
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
37
node_modules/@nodelib/fs.stat/package.json
generated
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
{
|
||||
"name": "@nodelib/fs.stat",
|
||||
"version": "2.0.5",
|
||||
"description": "Get the status of a file with some features",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.stat",
|
||||
"keywords": [
|
||||
"NodeLib",
|
||||
"fs",
|
||||
"FileSystem",
|
||||
"file system",
|
||||
"stat"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
},
|
||||
"files": [
|
||||
"out/**",
|
||||
"!out/**/*.map",
|
||||
"!out/**/*.spec.*"
|
||||
],
|
||||
"main": "out/index.js",
|
||||
"typings": "out/index.d.ts",
|
||||
"scripts": {
|
||||
"clean": "rimraf {tsconfig.tsbuildinfo,out}",
|
||||
"lint": "eslint \"src/**/*.ts\" --cache",
|
||||
"compile": "tsc -b .",
|
||||
"compile:watch": "tsc -p . --watch --sourceMap",
|
||||
"test": "mocha \"out/**/*.spec.js\" -s 0",
|
||||
"build": "npm run clean && npm run compile && npm run lint && npm test",
|
||||
"watch": "npm run clean && npm run compile:watch"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@nodelib/fs.macchiato": "1.0.4"
|
||||
},
|
||||
"gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562"
|
||||
}
|
21
node_modules/@nodelib/fs.walk/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Denis Malinochkin
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
215
node_modules/@nodelib/fs.walk/README.md
generated
vendored
Normal file
@ -0,0 +1,215 @@
|
||||
# @nodelib/fs.walk
|
||||
|
||||
> A library for efficiently walking a directory recursively.
|
||||
|
||||
## :bulb: Highlights
|
||||
|
||||
* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional).
|
||||
* :rocket: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type for performance reasons. See [`old` and `modern` mode](https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode).
|
||||
* :gear: Built-in directories/files and error filtering system.
|
||||
* :link: Can safely work with broken symbolic links.
|
||||
|
||||
## Install
|
||||
|
||||
```console
|
||||
npm install @nodelib/fs.walk
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```ts
|
||||
import * as fsWalk from '@nodelib/fs.walk';
|
||||
|
||||
fsWalk.walk('path', (error, entries) => { /* … */ });
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### .walk(path, [optionsOrSettings], callback)
|
||||
|
||||
Reads the directory recursively and asynchronously. Requires a callback function.
|
||||
|
||||
> :book: If you want to use the Promise API, use `util.promisify`.
|
||||
|
||||
```ts
|
||||
fsWalk.walk('path', (error, entries) => { /* … */ });
|
||||
fsWalk.walk('path', {}, (error, entries) => { /* … */ });
|
||||
fsWalk.walk('path', new fsWalk.Settings(), (error, entries) => { /* … */ });
|
||||
```
|
||||
|
||||
### .walkStream(path, [optionsOrSettings])
|
||||
|
||||
Reads the directory recursively and asynchronously. [Readable Stream](https://nodejs.org/dist/latest-v12.x/docs/api/stream.html#stream_readable_streams) is used as a provider.
|
||||
|
||||
```ts
|
||||
const stream = fsWalk.walkStream('path');
|
||||
const stream = fsWalk.walkStream('path', {});
|
||||
const stream = fsWalk.walkStream('path', new fsWalk.Settings());
|
||||
```
|
||||
|
||||
### .walkSync(path, [optionsOrSettings])
|
||||
|
||||
Reads the directory recursively and synchronously. Returns an array of entries.
|
||||
|
||||
```ts
|
||||
const entries = fsWalk.walkSync('path');
|
||||
const entries = fsWalk.walkSync('path', {});
|
||||
const entries = fsWalk.walkSync('path', new fsWalk.Settings());
|
||||
```
|
||||
|
||||
#### path
|
||||
|
||||
* Required: `true`
|
||||
* Type: `string | Buffer | URL`
|
||||
|
||||
A path to a file. If a URL is provided, it must use the `file:` protocol.
|
||||
|
||||
#### optionsOrSettings
|
||||
|
||||
* Required: `false`
|
||||
* Type: `Options | Settings`
|
||||
* Default: An instance of `Settings` class
|
||||
|
||||
An [`Options`](#options) object or an instance of [`Settings`](#settings) class.
|
||||
|
||||
> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class.
|
||||
|
||||
### Settings([options])
|
||||
|
||||
A class of full settings of the package.
|
||||
|
||||
```ts
|
||||
const settings = new fsWalk.Settings({ followSymbolicLinks: true });
|
||||
|
||||
const entries = fsWalk.walkSync('path', settings);
|
||||
```
|
||||
|
||||
## Entry
|
||||
|
||||
* `name` — The name of the entry (`unknown.txt`).
|
||||
* `path` — The path of the entry relative to call directory (`root/unknown.txt`).
|
||||
* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class.
|
||||
* [`stats`] — An instance of `fs.Stats` class.
|
||||
|
||||
## Options
|
||||
|
||||
### basePath
|
||||
|
||||
* Type: `string`
|
||||
* Default: `undefined`
|
||||
|
||||
By default, all paths are built relative to the root path. You can use this option to set custom root path.
|
||||
|
||||
In the example below we read the files from the `root` directory, but in the results the root path will be `custom`.
|
||||
|
||||
```ts
|
||||
fsWalk.walkSync('root'); // → ['root/file.txt']
|
||||
fsWalk.walkSync('root', { basePath: 'custom' }); // → ['custom/file.txt']
|
||||
```
|
||||
|
||||
### concurrency
|
||||
|
||||
* Type: `number`
|
||||
* Default: `Infinity`
|
||||
|
||||
The maximum number of concurrent calls to `fs.readdir`.
|
||||
|
||||
> :book: The higher the number, the higher performance and the load on the File System. If you want to read in quiet mode, set the value to `4 * os.cpus().length` (4 is default size of [thread pool work scheduling](http://docs.libuv.org/en/v1.x/threadpool.html#thread-pool-work-scheduling)).
|
||||
|
||||
### deepFilter
|
||||
|
||||
* Type: [`DeepFilterFunction`](./src/settings.ts)
|
||||
* Default: `undefined`
|
||||
|
||||
A function that indicates whether the directory will be read deep or not.
|
||||
|
||||
```ts
|
||||
// Skip all directories that starts with `node_modules`
|
||||
const filter: DeepFilterFunction = (entry) => !entry.path.startsWith('node_modules');
|
||||
```
|
||||
|
||||
### entryFilter
|
||||
|
||||
* Type: [`EntryFilterFunction`](./src/settings.ts)
|
||||
* Default: `undefined`
|
||||
|
||||
A function that indicates whether the entry will be included to results or not.
|
||||
|
||||
```ts
|
||||
// Exclude all `.js` files from results
|
||||
const filter: EntryFilterFunction = (entry) => !entry.name.endsWith('.js');
|
||||
```
|
||||
|
||||
### errorFilter
|
||||
|
||||
* Type: [`ErrorFilterFunction`](./src/settings.ts)
|
||||
* Default: `undefined`
|
||||
|
||||
A function that allows you to skip errors that occur when reading directories.
|
||||
|
||||
For example, you can skip `ENOENT` errors if required:
|
||||
|
||||
```ts
|
||||
// Skip all ENOENT errors
|
||||
const filter: ErrorFilterFunction = (error) => error.code == 'ENOENT';
|
||||
```
|
||||
|
||||
### stats
|
||||
|
||||
* Type: `boolean`
|
||||
* Default: `false`
|
||||
|
||||
Adds an instance of `fs.Stats` class to the [`Entry`](#entry).
|
||||
|
||||
> :book: Always use `fs.readdir` with additional `fs.lstat/fs.stat` calls to determine the entry type.
|
||||
|
||||
### followSymbolicLinks
|
||||
|
||||
* Type: `boolean`
|
||||
* Default: `false`
|
||||
|
||||
Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`.
|
||||
|
||||
### `throwErrorOnBrokenSymbolicLink`
|
||||
|
||||
* Type: `boolean`
|
||||
* Default: `true`
|
||||
|
||||
Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`.
|
||||
|
||||
### `pathSegmentSeparator`
|
||||
|
||||
* Type: `string`
|
||||
* Default: `path.sep`
|
||||
|
||||
By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead.
|
||||
|
||||
### `fs`
|
||||
|
||||
* Type: `FileSystemAdapter`
|
||||
* Default: A default FS methods
|
||||
|
||||
By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own.
|
||||
|
||||
```ts
|
||||
interface FileSystemAdapter {
|
||||
lstat: typeof fs.lstat;
|
||||
stat: typeof fs.stat;
|
||||
lstatSync: typeof fs.lstatSync;
|
||||
statSync: typeof fs.statSync;
|
||||
readdir: typeof fs.readdir;
|
||||
readdirSync: typeof fs.readdirSync;
|
||||
}
|
||||
|
||||
const settings = new fsWalk.Settings({
|
||||
fs: { lstat: fakeLstat }
|
||||
});
|
||||
```
|
||||
|
||||
## Changelog
|
||||
|
||||
See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version.
|
||||
|
||||
## License
|
||||
|
||||
This software is released under the terms of the MIT license.
|
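A brief sketch combining the filters documented above; the directory name and extension checks are arbitrary examples:

```ts
import * as fsWalk from '@nodelib/fs.walk';

const settings = new fsWalk.Settings({
    // Do not descend into node_modules at any depth.
    deepFilter: (entry) => !entry.path.includes('node_modules'),
    // Keep only TypeScript sources in the results.
    entryFilter: (entry) => entry.name.endsWith('.ts'),
    // Ignore entries that disappear while walking.
    errorFilter: (error) => error.code === 'ENOENT'
});

fsWalk.walk('.', settings, (error, entries) => {
    if (error !== null) {
        console.error(error);
        return;
    }
    console.log(entries.map((entry) => entry.path));
});
```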
14
node_modules/@nodelib/fs.walk/out/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
/// <reference types="node" />
|
||||
import type { Readable } from 'stream';
|
||||
import type { Dirent, FileSystemAdapter } from '@nodelib/fs.scandir';
|
||||
import { AsyncCallback } from './providers/async';
|
||||
import Settings, { DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction, Options } from './settings';
|
||||
import type { Entry } from './types';
|
||||
declare function walk(directory: string, callback: AsyncCallback): void;
|
||||
declare function walk(directory: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void;
|
||||
declare namespace walk {
|
||||
function __promisify__(directory: string, optionsOrSettings?: Options | Settings): Promise<Entry[]>;
|
||||
}
|
||||
declare function walkSync(directory: string, optionsOrSettings?: Options | Settings): Entry[];
|
||||
declare function walkStream(directory: string, optionsOrSettings?: Options | Settings): Readable;
|
||||
export { walk, walkSync, walkStream, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, Options, DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction };
|
34
node_modules/@nodelib/fs.walk/out/index.js
generated
vendored
Normal file
34
node_modules/@nodelib/fs.walk/out/index.js
generated
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0;
|
||||
const async_1 = require("./providers/async");
|
||||
const stream_1 = require("./providers/stream");
|
||||
const sync_1 = require("./providers/sync");
|
||||
const settings_1 = require("./settings");
|
||||
exports.Settings = settings_1.default;
|
||||
function walk(directory, optionsOrSettingsOrCallback, callback) {
|
||||
if (typeof optionsOrSettingsOrCallback === 'function') {
|
||||
new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback);
|
||||
return;
|
||||
}
|
||||
new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback);
|
||||
}
|
||||
exports.walk = walk;
|
||||
function walkSync(directory, optionsOrSettings) {
|
||||
const settings = getSettings(optionsOrSettings);
|
||||
const provider = new sync_1.default(directory, settings);
|
||||
return provider.read();
|
||||
}
|
||||
exports.walkSync = walkSync;
|
||||
function walkStream(directory, optionsOrSettings) {
|
||||
const settings = getSettings(optionsOrSettings);
|
||||
const provider = new stream_1.default(directory, settings);
|
||||
return provider.read();
|
||||
}
|
||||
exports.walkStream = walkStream;
|
||||
function getSettings(settingsOrOptions = {}) {
|
||||
if (settingsOrOptions instanceof settings_1.default) {
|
||||
return settingsOrOptions;
|
||||
}
|
||||
return new settings_1.default(settingsOrOptions);
|
||||
}
|
12
node_modules/@nodelib/fs.walk/out/providers/async.d.ts
generated
vendored
Normal file
12
node_modules/@nodelib/fs.walk/out/providers/async.d.ts
generated
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
import AsyncReader from '../readers/async';
|
||||
import type Settings from '../settings';
|
||||
import type { Entry, Errno } from '../types';
|
||||
export declare type AsyncCallback = (error: Errno, entries: Entry[]) => void;
|
||||
export default class AsyncProvider {
|
||||
private readonly _root;
|
||||
private readonly _settings;
|
||||
protected readonly _reader: AsyncReader;
|
||||
private readonly _storage;
|
||||
constructor(_root: string, _settings: Settings);
|
||||
read(callback: AsyncCallback): void;
|
||||
}
|
30
node_modules/@nodelib/fs.walk/out/providers/async.js
generated
vendored
Normal file
30
node_modules/@nodelib/fs.walk/out/providers/async.js
generated
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const async_1 = require("../readers/async");
|
||||
class AsyncProvider {
|
||||
constructor(_root, _settings) {
|
||||
this._root = _root;
|
||||
this._settings = _settings;
|
||||
this._reader = new async_1.default(this._root, this._settings);
|
||||
this._storage = [];
|
||||
}
|
||||
read(callback) {
|
||||
this._reader.onError((error) => {
|
||||
callFailureCallback(callback, error);
|
||||
});
|
||||
this._reader.onEntry((entry) => {
|
||||
this._storage.push(entry);
|
||||
});
|
||||
this._reader.onEnd(() => {
|
||||
callSuccessCallback(callback, this._storage);
|
||||
});
|
||||
this._reader.read();
|
||||
}
|
||||
}
|
||||
exports.default = AsyncProvider;
|
||||
function callFailureCallback(callback, error) {
|
||||
callback(error);
|
||||
}
|
||||
function callSuccessCallback(callback, entries) {
|
||||
callback(null, entries);
|
||||
}
|
4
node_modules/@nodelib/fs.walk/out/providers/index.d.ts
generated
vendored
Normal file
4
node_modules/@nodelib/fs.walk/out/providers/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
import AsyncProvider from './async';
|
||||
import StreamProvider from './stream';
|
||||
import SyncProvider from './sync';
|
||||
export { AsyncProvider, StreamProvider, SyncProvider };
|
9
node_modules/@nodelib/fs.walk/out/providers/index.js
generated
vendored
Normal file
9
node_modules/@nodelib/fs.walk/out/providers/index.js
generated
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.SyncProvider = exports.StreamProvider = exports.AsyncProvider = void 0;
|
||||
const async_1 = require("./async");
|
||||
exports.AsyncProvider = async_1.default;
|
||||
const stream_1 = require("./stream");
|
||||
exports.StreamProvider = stream_1.default;
|
||||
const sync_1 = require("./sync");
|
||||
exports.SyncProvider = sync_1.default;
|
12
node_modules/@nodelib/fs.walk/out/providers/stream.d.ts
generated
vendored
Normal file
12
node_modules/@nodelib/fs.walk/out/providers/stream.d.ts
generated
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
/// <reference types="node" />
|
||||
import { Readable } from 'stream';
|
||||
import AsyncReader from '../readers/async';
|
||||
import type Settings from '../settings';
|
||||
export default class StreamProvider {
|
||||
private readonly _root;
|
||||
private readonly _settings;
|
||||
protected readonly _reader: AsyncReader;
|
||||
protected readonly _stream: Readable;
|
||||
constructor(_root: string, _settings: Settings);
|
||||
read(): Readable;
|
||||
}
|
34
node_modules/@nodelib/fs.walk/out/providers/stream.js
generated
vendored
Normal file
34
node_modules/@nodelib/fs.walk/out/providers/stream.js
generated
vendored
Normal file
@ -0,0 +1,34 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const stream_1 = require("stream");
|
||||
const async_1 = require("../readers/async");
|
||||
class StreamProvider {
|
||||
constructor(_root, _settings) {
|
||||
this._root = _root;
|
||||
this._settings = _settings;
|
||||
this._reader = new async_1.default(this._root, this._settings);
|
||||
this._stream = new stream_1.Readable({
|
||||
objectMode: true,
|
||||
read: () => { },
|
||||
destroy: () => {
|
||||
if (!this._reader.isDestroyed) {
|
||||
this._reader.destroy();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
read() {
|
||||
this._reader.onError((error) => {
|
||||
this._stream.emit('error', error);
|
||||
});
|
||||
this._reader.onEntry((entry) => {
|
||||
this._stream.push(entry);
|
||||
});
|
||||
this._reader.onEnd(() => {
|
||||
this._stream.push(null);
|
||||
});
|
||||
this._reader.read();
|
||||
return this._stream;
|
||||
}
|
||||
}
|
||||
exports.default = StreamProvider;
|
10
node_modules/@nodelib/fs.walk/out/providers/sync.d.ts
generated
vendored
Normal file
10
node_modules/@nodelib/fs.walk/out/providers/sync.d.ts
generated
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
import SyncReader from '../readers/sync';
|
||||
import type Settings from '../settings';
|
||||
import type { Entry } from '../types';
|
||||
export default class SyncProvider {
|
||||
private readonly _root;
|
||||
private readonly _settings;
|
||||
protected readonly _reader: SyncReader;
|
||||
constructor(_root: string, _settings: Settings);
|
||||
read(): Entry[];
|
||||
}
|
14
node_modules/@nodelib/fs.walk/out/providers/sync.js
generated
vendored
Normal file
14
node_modules/@nodelib/fs.walk/out/providers/sync.js
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const sync_1 = require("../readers/sync");
|
||||
class SyncProvider {
|
||||
constructor(_root, _settings) {
|
||||
this._root = _root;
|
||||
this._settings = _settings;
|
||||
this._reader = new sync_1.default(this._root, this._settings);
|
||||
}
|
||||
read() {
|
||||
return this._reader.read();
|
||||
}
|
||||
}
|
||||
exports.default = SyncProvider;
|
30
node_modules/@nodelib/fs.walk/out/readers/async.d.ts
generated
vendored
Normal file
30
node_modules/@nodelib/fs.walk/out/readers/async.d.ts
generated
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
/// <reference types="node" />
|
||||
import { EventEmitter } from 'events';
|
||||
import * as fsScandir from '@nodelib/fs.scandir';
|
||||
import type Settings from '../settings';
|
||||
import type { Entry, Errno } from '../types';
|
||||
import Reader from './reader';
|
||||
declare type EntryEventCallback = (entry: Entry) => void;
|
||||
declare type ErrorEventCallback = (error: Errno) => void;
|
||||
declare type EndEventCallback = () => void;
|
||||
export default class AsyncReader extends Reader {
|
||||
protected readonly _settings: Settings;
|
||||
protected readonly _scandir: typeof fsScandir.scandir;
|
||||
protected readonly _emitter: EventEmitter;
|
||||
private readonly _queue;
|
||||
private _isFatalError;
|
||||
private _isDestroyed;
|
||||
constructor(_root: string, _settings: Settings);
|
||||
read(): EventEmitter;
|
||||
get isDestroyed(): boolean;
|
||||
destroy(): void;
|
||||
onEntry(callback: EntryEventCallback): void;
|
||||
onError(callback: ErrorEventCallback): void;
|
||||
onEnd(callback: EndEventCallback): void;
|
||||
private _pushToQueue;
|
||||
private _worker;
|
||||
private _handleError;
|
||||
private _handleEntry;
|
||||
private _emitEntry;
|
||||
}
|
||||
export {};
|
97
node_modules/@nodelib/fs.walk/out/readers/async.js
generated
vendored
Normal file
97
node_modules/@nodelib/fs.walk/out/readers/async.js
generated
vendored
Normal file
@ -0,0 +1,97 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const events_1 = require("events");
|
||||
const fsScandir = require("@nodelib/fs.scandir");
|
||||
const fastq = require("fastq");
|
||||
const common = require("./common");
|
||||
const reader_1 = require("./reader");
|
||||
class AsyncReader extends reader_1.default {
|
||||
constructor(_root, _settings) {
|
||||
super(_root, _settings);
|
||||
this._settings = _settings;
|
||||
this._scandir = fsScandir.scandir;
|
||||
this._emitter = new events_1.EventEmitter();
|
||||
this._queue = fastq(this._worker.bind(this), this._settings.concurrency);
|
||||
this._isFatalError = false;
|
||||
this._isDestroyed = false;
|
||||
this._queue.drain = () => {
|
||||
if (!this._isFatalError) {
|
||||
this._emitter.emit('end');
|
||||
}
|
||||
};
|
||||
}
|
||||
read() {
|
||||
this._isFatalError = false;
|
||||
this._isDestroyed = false;
|
||||
setImmediate(() => {
|
||||
this._pushToQueue(this._root, this._settings.basePath);
|
||||
});
|
||||
return this._emitter;
|
||||
}
|
||||
get isDestroyed() {
|
||||
return this._isDestroyed;
|
||||
}
|
||||
destroy() {
|
||||
if (this._isDestroyed) {
|
||||
throw new Error('The reader is already destroyed');
|
||||
}
|
||||
this._isDestroyed = true;
|
||||
this._queue.killAndDrain();
|
||||
}
|
||||
onEntry(callback) {
|
||||
this._emitter.on('entry', callback);
|
||||
}
|
||||
onError(callback) {
|
||||
this._emitter.once('error', callback);
|
||||
}
|
||||
onEnd(callback) {
|
||||
this._emitter.once('end', callback);
|
||||
}
|
||||
_pushToQueue(directory, base) {
|
||||
const queueItem = { directory, base };
|
||||
this._queue.push(queueItem, (error) => {
|
||||
if (error !== null) {
|
||||
this._handleError(error);
|
||||
}
|
||||
});
|
||||
}
|
||||
_worker(item, done) {
|
||||
this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => {
|
||||
if (error !== null) {
|
||||
done(error, undefined);
|
||||
return;
|
||||
}
|
||||
for (const entry of entries) {
|
||||
this._handleEntry(entry, item.base);
|
||||
}
|
||||
done(null, undefined);
|
||||
});
|
||||
}
|
||||
_handleError(error) {
|
||||
if (this._isDestroyed || !common.isFatalError(this._settings, error)) {
|
||||
return;
|
||||
}
|
||||
this._isFatalError = true;
|
||||
this._isDestroyed = true;
|
||||
this._emitter.emit('error', error);
|
||||
}
|
||||
_handleEntry(entry, base) {
|
||||
if (this._isDestroyed || this._isFatalError) {
|
||||
return;
|
||||
}
|
||||
const fullpath = entry.path;
|
||||
if (base !== undefined) {
|
||||
entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);
|
||||
}
|
||||
if (common.isAppliedFilter(this._settings.entryFilter, entry)) {
|
||||
this._emitEntry(entry);
|
||||
}
|
||||
if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {
|
||||
this._pushToQueue(fullpath, base === undefined ? undefined : entry.path);
|
||||
}
|
||||
}
|
||||
_emitEntry(entry) {
|
||||
this._emitter.emit('entry', entry);
|
||||
}
|
||||
}
|
||||
exports.default = AsyncReader;
|
7
node_modules/@nodelib/fs.walk/out/readers/common.d.ts
generated
vendored
Normal file
7
node_modules/@nodelib/fs.walk/out/readers/common.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
import type { FilterFunction } from '../settings';
|
||||
import type Settings from '../settings';
|
||||
import type { Errno } from '../types';
|
||||
export declare function isFatalError(settings: Settings, error: Errno): boolean;
|
||||
export declare function isAppliedFilter<T>(filter: FilterFunction<T> | null, value: T): boolean;
|
||||
export declare function replacePathSegmentSeparator(filepath: string, separator: string): string;
|
||||
export declare function joinPathSegments(a: string, b: string, separator: string): string;
|
31
node_modules/@nodelib/fs.walk/out/readers/common.js
generated
vendored
Normal file
31
node_modules/@nodelib/fs.walk/out/readers/common.js
generated
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0;
|
||||
function isFatalError(settings, error) {
|
||||
if (settings.errorFilter === null) {
|
||||
return true;
|
||||
}
|
||||
return !settings.errorFilter(error);
|
||||
}
|
||||
exports.isFatalError = isFatalError;
|
||||
function isAppliedFilter(filter, value) {
|
||||
return filter === null || filter(value);
|
||||
}
|
||||
exports.isAppliedFilter = isAppliedFilter;
|
||||
function replacePathSegmentSeparator(filepath, separator) {
|
||||
return filepath.split(/[/\\]/).join(separator);
|
||||
}
|
||||
exports.replacePathSegmentSeparator = replacePathSegmentSeparator;
|
||||
function joinPathSegments(a, b, separator) {
|
||||
if (a === '') {
|
||||
return b;
|
||||
}
|
||||
/**
|
||||
* The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`).
|
||||
*/
|
||||
if (a.endsWith(separator)) {
|
||||
return a + b;
|
||||
}
|
||||
return a + separator + b;
|
||||
}
|
||||
exports.joinPathSegments = joinPathSegments;
|
6
node_modules/@nodelib/fs.walk/out/readers/reader.d.ts
generated
vendored
Normal file
6
node_modules/@nodelib/fs.walk/out/readers/reader.d.ts
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
import type Settings from '../settings';
|
||||
export default class Reader {
|
||||
protected readonly _root: string;
|
||||
protected readonly _settings: Settings;
|
||||
constructor(_root: string, _settings: Settings);
|
||||
}
|
11
node_modules/@nodelib/fs.walk/out/readers/reader.js
generated
vendored
Normal file
11
node_modules/@nodelib/fs.walk/out/readers/reader.js
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const common = require("./common");
|
||||
class Reader {
|
||||
constructor(_root, _settings) {
|
||||
this._root = _root;
|
||||
this._settings = _settings;
|
||||
this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator);
|
||||
}
|
||||
}
|
||||
exports.default = Reader;
|
15
node_modules/@nodelib/fs.walk/out/readers/sync.d.ts
generated
vendored
Normal file
15
node_modules/@nodelib/fs.walk/out/readers/sync.d.ts
generated
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
import * as fsScandir from '@nodelib/fs.scandir';
|
||||
import type { Entry } from '../types';
|
||||
import Reader from './reader';
|
||||
export default class SyncReader extends Reader {
|
||||
protected readonly _scandir: typeof fsScandir.scandirSync;
|
||||
private readonly _storage;
|
||||
private readonly _queue;
|
||||
read(): Entry[];
|
||||
private _pushToQueue;
|
||||
private _handleQueue;
|
||||
private _handleDirectory;
|
||||
private _handleError;
|
||||
private _handleEntry;
|
||||
private _pushToStorage;
|
||||
}
|
59
node_modules/@nodelib/fs.walk/out/readers/sync.js
generated
vendored
Normal file
59
node_modules/@nodelib/fs.walk/out/readers/sync.js
generated
vendored
Normal file
@ -0,0 +1,59 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fsScandir = require("@nodelib/fs.scandir");
|
||||
const common = require("./common");
|
||||
const reader_1 = require("./reader");
|
||||
class SyncReader extends reader_1.default {
|
||||
constructor() {
|
||||
super(...arguments);
|
||||
this._scandir = fsScandir.scandirSync;
|
||||
this._storage = [];
|
||||
this._queue = new Set();
|
||||
}
|
||||
read() {
|
||||
this._pushToQueue(this._root, this._settings.basePath);
|
||||
this._handleQueue();
|
||||
return this._storage;
|
||||
}
|
||||
_pushToQueue(directory, base) {
|
||||
this._queue.add({ directory, base });
|
||||
}
|
||||
_handleQueue() {
|
||||
for (const item of this._queue.values()) {
|
||||
this._handleDirectory(item.directory, item.base);
|
||||
}
|
||||
}
|
||||
_handleDirectory(directory, base) {
|
||||
try {
|
||||
const entries = this._scandir(directory, this._settings.fsScandirSettings);
|
||||
for (const entry of entries) {
|
||||
this._handleEntry(entry, base);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
this._handleError(error);
|
||||
}
|
||||
}
|
||||
_handleError(error) {
|
||||
if (!common.isFatalError(this._settings, error)) {
|
||||
return;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
_handleEntry(entry, base) {
|
||||
const fullpath = entry.path;
|
||||
if (base !== undefined) {
|
||||
entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator);
|
||||
}
|
||||
if (common.isAppliedFilter(this._settings.entryFilter, entry)) {
|
||||
this._pushToStorage(entry);
|
||||
}
|
||||
if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) {
|
||||
this._pushToQueue(fullpath, base === undefined ? undefined : entry.path);
|
||||
}
|
||||
}
|
||||
_pushToStorage(entry) {
|
||||
this._storage.push(entry);
|
||||
}
|
||||
}
|
||||
exports.default = SyncReader;
|
30
node_modules/@nodelib/fs.walk/out/settings.d.ts
generated
vendored
Normal file
30
node_modules/@nodelib/fs.walk/out/settings.d.ts
generated
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
import * as fsScandir from '@nodelib/fs.scandir';
|
||||
import type { Entry, Errno } from './types';
|
||||
export declare type FilterFunction<T> = (value: T) => boolean;
|
||||
export declare type DeepFilterFunction = FilterFunction<Entry>;
|
||||
export declare type EntryFilterFunction = FilterFunction<Entry>;
|
||||
export declare type ErrorFilterFunction = FilterFunction<Errno>;
|
||||
export interface Options {
|
||||
basePath?: string;
|
||||
concurrency?: number;
|
||||
deepFilter?: DeepFilterFunction;
|
||||
entryFilter?: EntryFilterFunction;
|
||||
errorFilter?: ErrorFilterFunction;
|
||||
followSymbolicLinks?: boolean;
|
||||
fs?: Partial<fsScandir.FileSystemAdapter>;
|
||||
pathSegmentSeparator?: string;
|
||||
stats?: boolean;
|
||||
throwErrorOnBrokenSymbolicLink?: boolean;
|
||||
}
|
||||
export default class Settings {
|
||||
private readonly _options;
|
||||
readonly basePath?: string;
|
||||
readonly concurrency: number;
|
||||
readonly deepFilter: DeepFilterFunction | null;
|
||||
readonly entryFilter: EntryFilterFunction | null;
|
||||
readonly errorFilter: ErrorFilterFunction | null;
|
||||
readonly pathSegmentSeparator: string;
|
||||
readonly fsScandirSettings: fsScandir.Settings;
|
||||
constructor(_options?: Options);
|
||||
private _getValue;
|
||||
}
|
26
node_modules/@nodelib/fs.walk/out/settings.js
generated
vendored
Normal file
26
node_modules/@nodelib/fs.walk/out/settings.js
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path = require("path");
|
||||
const fsScandir = require("@nodelib/fs.scandir");
|
||||
class Settings {
|
||||
constructor(_options = {}) {
|
||||
this._options = _options;
|
||||
this.basePath = this._getValue(this._options.basePath, undefined);
|
||||
this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY);
|
||||
this.deepFilter = this._getValue(this._options.deepFilter, null);
|
||||
this.entryFilter = this._getValue(this._options.entryFilter, null);
|
||||
this.errorFilter = this._getValue(this._options.errorFilter, null);
|
||||
this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);
|
||||
this.fsScandirSettings = new fsScandir.Settings({
|
||||
followSymbolicLinks: this._options.followSymbolicLinks,
|
||||
fs: this._options.fs,
|
||||
pathSegmentSeparator: this._options.pathSegmentSeparator,
|
||||
stats: this._options.stats,
|
||||
throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink
|
||||
});
|
||||
}
|
||||
_getValue(option, value) {
|
||||
return option !== null && option !== void 0 ? option : value;
|
||||
}
|
||||
}
|
||||
exports.default = Settings;
|
8
node_modules/@nodelib/fs.walk/out/types/index.d.ts
generated
vendored
Normal file
8
node_modules/@nodelib/fs.walk/out/types/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
/// <reference types="node" />
|
||||
import type * as scandir from '@nodelib/fs.scandir';
|
||||
export declare type Entry = scandir.Entry;
|
||||
export declare type Errno = NodeJS.ErrnoException;
|
||||
export interface QueueItem {
|
||||
directory: string;
|
||||
base?: string;
|
||||
}
|
2
node_modules/@nodelib/fs.walk/out/types/index.js
generated
vendored
Normal file
2
node_modules/@nodelib/fs.walk/out/types/index.js
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
44
node_modules/@nodelib/fs.walk/package.json
generated
vendored
Normal file
44
node_modules/@nodelib/fs.walk/package.json
generated
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
{
|
||||
"name": "@nodelib/fs.walk",
|
||||
"version": "1.2.8",
|
||||
"description": "A library for efficiently walking a directory recursively",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.walk",
|
||||
"keywords": [
|
||||
"NodeLib",
|
||||
"fs",
|
||||
"FileSystem",
|
||||
"file system",
|
||||
"walk",
|
||||
"scanner",
|
||||
"crawler"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
},
|
||||
"files": [
|
||||
"out/**",
|
||||
"!out/**/*.map",
|
||||
"!out/**/*.spec.*",
|
||||
"!out/**/tests/**"
|
||||
],
|
||||
"main": "out/index.js",
|
||||
"typings": "out/index.d.ts",
|
||||
"scripts": {
|
||||
"clean": "rimraf {tsconfig.tsbuildinfo,out}",
|
||||
"lint": "eslint \"src/**/*.ts\" --cache",
|
||||
"compile": "tsc -b .",
|
||||
"compile:watch": "tsc -p . --watch --sourceMap",
|
||||
"test": "mocha \"out/**/*.spec.js\" -s 0",
|
||||
"build": "npm run clean && npm run compile && npm run lint && npm test",
|
||||
"watch": "npm run clean && npm run compile:watch"
|
||||
},
|
||||
"dependencies": {
|
||||
"@nodelib/fs.scandir": "2.1.5",
|
||||
"fastq": "^1.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@nodelib/fs.macchiato": "1.0.4"
|
||||
},
|
||||
"gitHead": "1e5bad48565da2b06b8600e744324ea240bf49d8"
|
||||
}
|
14
node_modules/@pkgjs/parseargs/.editorconfig
generated
vendored
Normal file
14
node_modules/@pkgjs/parseargs/.editorconfig
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
# EditorConfig is awesome: http://EditorConfig.org
|
||||
|
||||
# top-most EditorConfig file
|
||||
root = true
|
||||
|
||||
# Copied from Node.js to ease compatibility in PR.
|
||||
[*]
|
||||
charset = utf-8
|
||||
end_of_line = lf
|
||||
indent_size = 2
|
||||
indent_style = space
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
quote_type = single
|
147
node_modules/@pkgjs/parseargs/CHANGELOG.md
generated
vendored
Normal file
147
node_modules/@pkgjs/parseargs/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,147 @@
|
||||
# Changelog
|
||||
|
||||
## [0.11.0](https://github.com/pkgjs/parseargs/compare/v0.10.0...v0.11.0) (2022-10-08)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add `default` option parameter ([#142](https://github.com/pkgjs/parseargs/issues/142)) ([cd20847](https://github.com/pkgjs/parseargs/commit/cd20847a00b2f556aa9c085ac83b942c60868ec1))
|
||||
|
||||
## [0.10.0](https://github.com/pkgjs/parseargs/compare/v0.9.1...v0.10.0) (2022-07-21)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* add parsed meta-data to returned properties ([#129](https://github.com/pkgjs/parseargs/issues/129)) ([91bfb4d](https://github.com/pkgjs/parseargs/commit/91bfb4d3f7b6937efab1b27c91c45d1205f1497e))
|
||||
|
||||
## [0.9.1](https://github.com/pkgjs/parseargs/compare/v0.9.0...v0.9.1) (2022-06-20)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **runtime:** support node 14+ ([#135](https://github.com/pkgjs/parseargs/issues/135)) ([6a1c5a6](https://github.com/pkgjs/parseargs/commit/6a1c5a6f7cadf2f035e004027e2742e3c4ce554b))
|
||||
|
||||
## [0.9.0](https://github.com/pkgjs/parseargs/compare/v0.8.0...v0.9.0) (2022-05-23)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* drop handling of electron arguments (#121)
|
||||
|
||||
### Code Refactoring
|
||||
|
||||
* drop handling of electron arguments ([#121](https://github.com/pkgjs/parseargs/issues/121)) ([a2ffd53](https://github.com/pkgjs/parseargs/commit/a2ffd537c244a062371522b955acb45a404fc9f2))
|
||||
|
||||
## [0.8.0](https://github.com/pkgjs/parseargs/compare/v0.7.1...v0.8.0) (2022-05-16)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* switch type:string option arguments to greedy, but with error for suspect cases in strict mode (#88)
|
||||
* positionals now opt-in when strict:true (#116)
|
||||
* create result.values with null prototype (#111)
|
||||
|
||||
### Features
|
||||
|
||||
* create result.values with null prototype ([#111](https://github.com/pkgjs/parseargs/issues/111)) ([9d539c3](https://github.com/pkgjs/parseargs/commit/9d539c3d57f269c160e74e0656ad4fa84ff92ec2))
|
||||
* positionals now opt-in when strict:true ([#116](https://github.com/pkgjs/parseargs/issues/116)) ([3643338](https://github.com/pkgjs/parseargs/commit/364333826b746e8a7dc5505b4b22fd19ac51df3b))
|
||||
* switch type:string option arguments to greedy, but with error for suspect cases in strict mode ([#88](https://github.com/pkgjs/parseargs/issues/88)) ([c2b5e72](https://github.com/pkgjs/parseargs/commit/c2b5e72161991dfdc535909f1327cc9b970fe7e8))
|
||||
|
||||
### [0.7.1](https://github.com/pkgjs/parseargs/compare/v0.7.0...v0.7.1) (2022-04-15)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* resist pollution ([#106](https://github.com/pkgjs/parseargs/issues/106)) ([ecf2dec](https://github.com/pkgjs/parseargs/commit/ecf2dece0a9f2a76d789384d5d71c68ffe64022a))
|
||||
|
||||
## [0.7.0](https://github.com/pkgjs/parseargs/compare/v0.6.0...v0.7.0) (2022-04-13)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Add strict mode to parser ([#74](https://github.com/pkgjs/parseargs/issues/74)) ([8267d02](https://github.com/pkgjs/parseargs/commit/8267d02083a87b8b8a71fcce08348d1e031ea91c))
|
||||
|
||||
## [0.6.0](https://github.com/pkgjs/parseargs/compare/v0.5.0...v0.6.0) (2022-04-11)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* rework results to remove redundant `flags` property and store value true for boolean options (#83)
|
||||
* switch to existing ERR_INVALID_ARG_VALUE (#97)
|
||||
|
||||
### Code Refactoring
|
||||
|
||||
* rework results to remove redundant `flags` property and store value true for boolean options ([#83](https://github.com/pkgjs/parseargs/issues/83)) ([be153db](https://github.com/pkgjs/parseargs/commit/be153dbed1d488cb7b6e27df92f601ba7337713d))
|
||||
* switch to existing ERR_INVALID_ARG_VALUE ([#97](https://github.com/pkgjs/parseargs/issues/97)) ([084a23f](https://github.com/pkgjs/parseargs/commit/084a23f9fde2da030b159edb1c2385f24579ce40))
|
||||
|
||||
## [0.5.0](https://github.com/pkgjs/parseargs/compare/v0.4.0...v0.5.0) (2022-04-10)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* Require type to be specified for each supplied option (#95)
|
||||
|
||||
### Features
|
||||
|
||||
* Require type to be specified for each supplied option ([#95](https://github.com/pkgjs/parseargs/issues/95)) ([02cd018](https://github.com/pkgjs/parseargs/commit/02cd01885b8aaa59f2db8308f2d4479e64340068))
|
||||
|
||||
## [0.4.0](https://github.com/pkgjs/parseargs/compare/v0.3.0...v0.4.0) (2022-03-12)
|
||||
|
||||
|
||||
### ⚠ BREAKING CHANGES
|
||||
|
||||
* parsing, revisit short option groups, add support for combined short and value (#75)
|
||||
* restructure configuration to take options bag (#63)
|
||||
|
||||
### Code Refactoring
|
||||
|
||||
* parsing, revisit short option groups, add support for combined short and value ([#75](https://github.com/pkgjs/parseargs/issues/75)) ([a92600f](https://github.com/pkgjs/parseargs/commit/a92600fa6c214508ab1e016fa55879a314f541af))
|
||||
* restructure configuration to take options bag ([#63](https://github.com/pkgjs/parseargs/issues/63)) ([b412095](https://github.com/pkgjs/parseargs/commit/b4120957d90e809ee8b607b06e747d3e6a6b213e))
|
||||
|
||||
## [0.3.0](https://github.com/pkgjs/parseargs/compare/v0.2.0...v0.3.0) (2022-02-06)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* **parser:** support short-option groups ([#59](https://github.com/pkgjs/parseargs/issues/59)) ([882067b](https://github.com/pkgjs/parseargs/commit/882067bc2d7cbc6b796f8e5a079a99bc99d4e6ba))
|
||||
|
||||
## [0.2.0](https://github.com/pkgjs/parseargs/compare/v0.1.1...v0.2.0) (2022-02-05)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* basic support for shorts ([#50](https://github.com/pkgjs/parseargs/issues/50)) ([a2f36d7](https://github.com/pkgjs/parseargs/commit/a2f36d7da4145af1c92f76806b7fe2baf6beeceb))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* always store value for a=b ([#43](https://github.com/pkgjs/parseargs/issues/43)) ([a85e8dc](https://github.com/pkgjs/parseargs/commit/a85e8dc06379fd2696ee195cc625de8fac6aee42))
|
||||
* support single dash as positional ([#49](https://github.com/pkgjs/parseargs/issues/49)) ([d795bf8](https://github.com/pkgjs/parseargs/commit/d795bf877d068fd67aec381f30b30b63f97109ad))
|
||||
|
||||
### [0.1.1](https://github.com/pkgjs/parseargs/compare/v0.1.0...v0.1.1) (2022-01-25)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* only use arrays in results for multiples ([#42](https://github.com/pkgjs/parseargs/issues/42)) ([c357584](https://github.com/pkgjs/parseargs/commit/c357584847912506319ed34a0840080116f4fd65))
|
||||
|
||||
## 0.1.0 (2022-01-22)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* expand scenarios covered by default arguments for environments ([#20](https://github.com/pkgjs/parseargs/issues/20)) ([582ada7](https://github.com/pkgjs/parseargs/commit/582ada7be0eca3a73d6e0bd016e7ace43449fa4c))
|
||||
* update readme and include contributing guidelines ([8edd6fc](https://github.com/pkgjs/parseargs/commit/8edd6fc863cd705f6fac732724159ebe8065a2b0))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* do not strip excess leading dashes on long option names ([#21](https://github.com/pkgjs/parseargs/issues/21)) ([f848590](https://github.com/pkgjs/parseargs/commit/f848590ebf3249ed5979ff47e003fa6e1a8ec5c0))
|
||||
* name & readme ([3f057c1](https://github.com/pkgjs/parseargs/commit/3f057c1b158a1bdbe878c64b57460c58e56e465f))
|
||||
* package.json values ([9bac300](https://github.com/pkgjs/parseargs/commit/9bac300e00cd76c77076bf9e75e44f8929512da9))
|
||||
* update readme name ([957d8d9](https://github.com/pkgjs/parseargs/commit/957d8d96e1dcb48297c0a14345d44c0123b2883e))
|
||||
|
||||
|
||||
### Build System
|
||||
|
||||
* first release as minor ([421c6e2](https://github.com/pkgjs/parseargs/commit/421c6e2569a8668ad14fac5a5af5be60479a7571))
|
201
node_modules/@pkgjs/parseargs/LICENSE
generated
vendored
Normal file
201
node_modules/@pkgjs/parseargs/LICENSE
generated
vendored
Normal file
@ -0,0 +1,201 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
413
node_modules/@pkgjs/parseargs/README.md
generated
vendored
Normal file
413
node_modules/@pkgjs/parseargs/README.md
generated
vendored
Normal file
@ -0,0 +1,413 @@
|
||||
<!-- omit in toc -->
|
||||
# parseArgs
|
||||
|
||||
[![Coverage][coverage-image]][coverage-url]
|
||||
|
||||
Polyfill of `util.parseArgs()`
|
||||
|
||||
## `util.parseArgs([config])`
|
||||
|
||||
<!-- YAML
|
||||
added: v18.3.0
|
||||
changes:
|
||||
- version: REPLACEME
|
||||
pr-url: https://github.com/nodejs/node/pull/43459
|
||||
description: add support for returning detailed parse information
|
||||
using `tokens` in input `config` and returned properties.
|
||||
-->
|
||||
|
||||
> Stability: 1 - Experimental
|
||||
|
||||
* `config` {Object} Used to provide arguments for parsing and to configure
|
||||
the parser. `config` supports the following properties:
|
||||
* `args` {string\[]} array of argument strings. **Default:** `process.argv`
|
||||
with `execPath` and `filename` removed.
|
||||
* `options` {Object} Used to describe arguments known to the parser.
|
||||
Keys of `options` are the long names of options and values are an
|
||||
{Object} accepting the following properties:
|
||||
* `type` {string} Type of argument, which must be either `boolean` or `string`.
|
||||
* `multiple` {boolean} Whether this option can be provided multiple
|
||||
times. If `true`, all values will be collected in an array. If
|
||||
`false`, values for the option are last-wins. **Default:** `false`.
|
||||
* `short` {string} A single character alias for the option.
|
||||
* `default` {string | boolean | string\[] | boolean\[]} The default option
|
||||
value when it is not set by args. It must be of the same type as the
|
||||
`type` property. When `multiple` is `true`, it must be an array.
|
||||
* `strict` {boolean} Should an error be thrown when unknown arguments
|
||||
are encountered, or when arguments are passed that do not match the
|
||||
`type` configured in `options`.
|
||||
**Default:** `true`.
|
||||
* `allowPositionals` {boolean} Whether this command accepts positional
|
||||
arguments.
|
||||
**Default:** `false` if `strict` is `true`, otherwise `true`.
|
||||
* `tokens` {boolean} Return the parsed tokens. This is useful for extending
|
||||
the built-in behavior, from adding additional checks through to reprocessing
|
||||
the tokens in different ways.
|
||||
**Default:** `false`.
|
||||
|
||||
* Returns: {Object} The parsed command line arguments:
|
||||
* `values` {Object} A mapping of parsed option names with their {string}
|
||||
or {boolean} values.
|
||||
* `positionals` {string\[]} Positional arguments.
|
||||
* `tokens` {Object\[] | undefined} See [parseArgs tokens](#parseargs-tokens)
|
||||
section. Only returned if `config` includes `tokens: true`.
|
||||
|
||||
Provides a higher level API for command-line argument parsing than interacting
|
||||
with `process.argv` directly. Takes a specification for the expected arguments
|
||||
and returns a structured object with the parsed options and positionals.
|
||||
|
||||
```mjs
|
||||
import { parseArgs } from 'node:util';
|
||||
const args = ['-f', '--bar', 'b'];
|
||||
const options = {
|
||||
foo: {
|
||||
type: 'boolean',
|
||||
short: 'f'
|
||||
},
|
||||
bar: {
|
||||
type: 'string'
|
||||
}
|
||||
};
|
||||
const {
|
||||
values,
|
||||
positionals
|
||||
} = parseArgs({ args, options });
|
||||
console.log(values, positionals);
|
||||
// Prints: [Object: null prototype] { foo: true, bar: 'b' } []
|
||||
```
|
||||
|
||||
```cjs
|
||||
const { parseArgs } = require('node:util');
|
||||
const args = ['-f', '--bar', 'b'];
|
||||
const options = {
|
||||
foo: {
|
||||
type: 'boolean',
|
||||
short: 'f'
|
||||
},
|
||||
bar: {
|
||||
type: 'string'
|
||||
}
|
||||
};
|
||||
const {
|
||||
values,
|
||||
positionals
|
||||
} = parseArgs({ args, options });
|
||||
console.log(values, positionals);
|
||||
// Prints: [Object: null prototype] { foo: true, bar: 'b' } []
|
||||
```
|
||||
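The `default` property listed above is not exercised by these examples; here is a minimal sketch (with a hypothetical `logfile` option) of how a default is applied when the option is absent from `args`:

```js
const { parseArgs } = require('@pkgjs/parseargs');

const options = {
  logfile: { type: 'string', default: 'default.log' }
};
const { values } = parseArgs({ args: [], options });
console.log(values);
// Prints: [Object: null prototype] { logfile: 'default.log' }
```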
|
||||
`util.parseArgs` is experimental and behavior may change. Join the
|
||||
conversation in [pkgjs/parseargs][] to contribute to the design.
|
||||
|
||||
### `parseArgs` `tokens`
|
||||
|
||||
Detailed parse information is available for adding custom behaviours by
|
||||
specifying `tokens: true` in the configuration.
|
||||
The returned tokens have properties describing:
|
||||
|
||||
* all tokens
|
||||
* `kind` {string} One of 'option', 'positional', or 'option-terminator'.
|
||||
* `index` {number} Index of element in `args` containing token. So the
|
||||
source argument for a token is `args[token.index]`.
|
||||
* option tokens
|
||||
* `name` {string} Long name of option.
|
||||
* `rawName` {string} How option used in args, like `-f` of `--foo`.
|
||||
* `value` {string | undefined} Option value specified in args.
|
||||
Undefined for boolean options.
|
||||
* `inlineValue` {boolean | undefined} Whether option value specified inline,
|
||||
like `--foo=bar`.
|
||||
* positional tokens
|
||||
* `value` {string} The value of the positional argument in args (i.e. `args[index]`).
|
||||
* option-terminator token
|
||||
|
||||
The returned tokens are in the order encountered in the input args. Options
|
||||
that appear more than once in args produce a token for each use. Short option
|
||||
groups like `-xy` expand to a token for each option. So `-xxx` produces
|
||||
three tokens.
|
||||
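As a quick illustration (a sketch using a hypothetical `verbose` option), a repeated short option produces one token per use while `values` only keeps the final result:

```js
const { parseArgs } = require('@pkgjs/parseargs');

const options = { verbose: { type: 'boolean', short: 'v' } };
const { values, tokens } = parseArgs({ args: ['-vvv'], options, tokens: true });
// tokens has three entries of kind 'option', one for each use in the group.
console.log(tokens.length);
// Prints: 3
console.log(values.verbose);
// Prints: true
```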
|
||||
For example, to use the returned tokens to add support for a negated option
|
||||
like `--no-color`, the tokens can be reprocessed to change the value stored
|
||||
for the negated option.
|
||||
|
||||
```mjs
|
||||
import { parseArgs } from 'node:util';
|
||||
|
||||
const options = {
|
||||
'color': { type: 'boolean' },
|
||||
'no-color': { type: 'boolean' },
|
||||
'logfile': { type: 'string' },
|
||||
'no-logfile': { type: 'boolean' },
|
||||
};
|
||||
const { values, tokens } = parseArgs({ options, tokens: true });
|
||||
|
||||
// Reprocess the option tokens and overwrite the returned values.
|
||||
tokens
|
||||
.filter((token) => token.kind === 'option')
|
||||
.forEach((token) => {
|
||||
if (token.name.startsWith('no-')) {
|
||||
// Store foo:false for --no-foo
|
||||
const positiveName = token.name.slice(3);
|
||||
values[positiveName] = false;
|
||||
delete values[token.name];
|
||||
} else {
|
||||
// Resave value so last one wins if both --foo and --no-foo.
|
||||
values[token.name] = token.value ?? true;
|
||||
}
|
||||
});
|
||||
|
||||
const color = values.color;
|
||||
const logfile = values.logfile ?? 'default.log';
|
||||
|
||||
console.log({ logfile, color });
|
||||
```
|
||||
|
||||
```cjs
|
||||
const { parseArgs } = require('node:util');
|
||||
|
||||
const options = {
|
||||
'color': { type: 'boolean' },
|
||||
'no-color': { type: 'boolean' },
|
||||
'logfile': { type: 'string' },
|
||||
'no-logfile': { type: 'boolean' },
|
||||
};
|
||||
const { values, tokens } = parseArgs({ options, tokens: true });
|
||||
|
||||
// Reprocess the option tokens and overwrite the returned values.
|
||||
tokens
|
||||
.filter((token) => token.kind === 'option')
|
||||
.forEach((token) => {
|
||||
if (token.name.startsWith('no-')) {
|
||||
// Store foo:false for --no-foo
|
||||
const positiveName = token.name.slice(3);
|
||||
values[positiveName] = false;
|
||||
delete values[token.name];
|
||||
} else {
|
||||
// Resave value so last one wins if both --foo and --no-foo.
|
||||
values[token.name] = token.value ?? true;
|
||||
}
|
||||
});
|
||||
|
||||
const color = values.color;
|
||||
const logfile = values.logfile ?? 'default.log';
|
||||
|
||||
console.log({ logfile, color });
|
||||
```
|
||||
|
||||
Example usage showing negated options, and that when an option is used
|
||||
multiple ways, the last one wins.
|
||||
|
||||
```console
|
||||
$ node negate.js
|
||||
{ logfile: 'default.log', color: undefined }
|
||||
$ node negate.js --no-logfile --no-color
|
||||
{ logfile: false, color: false }
|
||||
$ node negate.js --logfile=test.log --color
|
||||
{ logfile: 'test.log', color: true }
|
||||
$ node negate.js --no-logfile --logfile=test.log --color --no-color
|
||||
{ logfile: 'test.log', color: false }
|
||||
```
|
||||
|
||||
-----
|
||||
|
||||
<!-- omit in toc -->
|
||||
## Table of Contents
|
||||
- [`util.parseArgs([config])`](#utilparseargsconfig)
|
||||
- [Scope](#scope)
|
||||
- [Version Matchups](#version-matchups)
|
||||
- [🚀 Getting Started](#-getting-started)
|
||||
- [🙌 Contributing](#-contributing)
|
||||
- [💡 `process.mainArgs` Proposal](#-processmainargs-proposal)
|
||||
- [Implementation:](#implementation)
|
||||
- [📃 Examples](#-examples)
|
||||
- [F.A.Qs](#faqs)
|
||||
- [Links & Resources](#links--resources)
|
||||
|
||||
-----
|
||||
|
||||
## Scope
|
||||
|
||||
It is already possible to build great arg parsing modules on top of what Node.js provides; the prickly API is abstracted away by these modules. Thus, util.parseArgs() is not necessarily intended for library authors; it is intended for developers of simple CLI tools, ad-hoc scripts, deployed Node.js applications, and learning materials.
|
||||
|
||||
It is exceedingly difficult to provide an API that is both friendly to these Node.js users and extensible enough for libraries to build upon. We chose to prioritize these use cases because they are currently not well-served by Node.js' API.
|
||||
|
||||
----
|
||||
|
||||
## Version Matchups
|
||||
|
||||
| Node.js | @pkgjs/parseArgs |
|
||||
| -- | -- |
|
||||
| [v18.3.0](https://nodejs.org/docs/latest-v18.x/api/util.html#utilparseargsconfig) | [v0.9.1](https://github.com/pkgjs/parseargs/tree/v0.9.1#utilparseargsconfig) |
|
||||
| [v16.17.0](https://nodejs.org/dist/latest-v16.x/docs/api/util.html#utilparseargsconfig), [v18.7.0](https://nodejs.org/docs/latest-v18.x/api/util.html#utilparseargsconfig) | [0.10.0](https://github.com/pkgjs/parseargs/tree/v0.10.0#utilparseargsconfig) |
|
||||
|
||||
----
|
||||
|
||||
## 🚀 Getting Started
|
||||
|
||||
1. **Install dependencies.**
|
||||
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
|
||||
2. **Open the index.js file and start editing!**
|
||||
|
||||
3. **Test your code by calling parseArgs through our test file.**
|
||||
|
||||
```bash
|
||||
npm test
|
||||
```
|
||||
|
||||
----
|
||||
|
||||
## 🙌 Contributing
|
||||
|
||||
Any person who wants to contribute to the initiative is welcome! Please first read the [Contributing Guide](CONTRIBUTING.md).
|
||||
|
||||
Additionally, reading the [`Examples`](#-examples) section of this document will be the best way to familiarize yourself with the target expected behavior for parseArgs() once it is fully implemented.
|
||||
|
||||
This package was implemented using [tape](https://www.npmjs.com/package/tape) as its test harness.
|
||||
|
||||
----
|
||||
|
||||
## 💡 `process.mainArgs` Proposal
|
||||
|
||||
> Note: This can be moved forward independently of the `util.parseArgs()` proposal/work.
|
||||
|
||||
### Implementation:
|
||||
|
||||
```javascript
|
||||
process.mainArgs = process.argv.slice(process._exec ? 1 : 2)
|
||||
```

----

## 📃 Examples

```js
const { parseArgs } = require('@pkgjs/parseargs');
```

```js
const { parseArgs } = require('@pkgjs/parseargs');

// specify the options that may be used
const options = {
  foo: { type: 'string' },
  bar: { type: 'boolean' },
};
const args = ['--foo=a', '--bar'];
const { values, positionals } = parseArgs({ args, options });
// values = { foo: 'a', bar: true }
// positionals = []
```

```js
const { parseArgs } = require('@pkgjs/parseargs');

// type:string & multiple
const options = {
  foo: {
    type: 'string',
    multiple: true,
  },
};
const args = ['--foo=a', '--foo', 'b'];
const { values, positionals } = parseArgs({ args, options });
// values = { foo: [ 'a', 'b' ] }
// positionals = []
```

```js
const { parseArgs } = require('@pkgjs/parseargs');

// shorts
const options = {
  foo: {
    short: 'f',
    type: 'boolean'
  },
};
const args = ['-f', 'b'];
const { values, positionals } = parseArgs({ args, options, allowPositionals: true });
// values = { foo: true }
// positionals = ['b']
```

```js
const { parseArgs } = require('@pkgjs/parseargs');

// unconfigured
const options = {};
const args = ['-f', '--foo=a', '--bar', 'b'];
const { values, positionals } = parseArgs({ strict: false, args, options, allowPositionals: true });
// values = { f: true, foo: 'a', bar: true }
// positionals = ['b']
```
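
The example scripts vendored alongside this README also rely on the `tokens: true` option, which the snippets above do not show. Here is a minimal sketch of its shape; the token fields in the comments illustrate the documented token API and are not verbatim program output:

```js
const { parseArgs } = require('@pkgjs/parseargs');

// tokens: true returns the individual parse steps alongside values/positionals
const options = { foo: { type: 'string' } };
const args = ['--foo=a', 'b'];
const { values, positionals, tokens } = parseArgs({ args, options, allowPositionals: true, tokens: true });
// values = { foo: 'a' }
// positionals = ['b']
// tokens ≈ [
//   { kind: 'option', name: 'foo', rawName: '--foo', index: 0, value: 'a', inlineValue: true },
//   { kind: 'positional', index: 1, value: 'b' }
// ]
```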

----

## F.A.Qs

- Is `cmd --foo=bar baz` the same as `cmd baz --foo=bar`?
  - yes
- Does the parser execute a function?
  - no
- Does the parser execute one of several functions, depending on input?
  - no
- Can subcommands take options that are distinct from the main command?
  - no
- Does it output generated help when no options match?
  - no
- Does it generate short usage? Like: `usage: ls [-ABCFGHLOPRSTUWabcdefghiklmnopqrstuwx1] [file ...]`
  - no (no usage/help at all)
- Does the user provide the long usage text? For each option? For the whole command?
  - no
- Do subcommands (if implemented) have their own usage output?
  - no
- Does usage print if the user runs `cmd --help`?
  - no
- Does it set `process.exitCode`?
  - no
- Does usage print to stderr or stdout?
  - N/A
- Does it check types? (Say, specify that an option is a boolean, number, etc.)
  - no
- Can an option have more than one type? (string or false, for example)
  - no
- Can the user define a type? (Say, `type: path` to call `path.resolve()` on the argument.)
  - no
- Does a `--foo=0o22` mean 0, 22, 18, or "0o22"?
  - `"0o22"`
- Does it coerce types?
  - no
- Does `--no-foo` coerce to `--foo=false`? For all options? Only boolean options?
  - no, it sets `{values:{'no-foo': true}}`
- Is `--foo` the same as `--foo=true`? Only for known booleans? Only at the end?
  - no, they are not the same. There is no special handling of `true` as a value, so it is just another string.
- Does it read environment variables? Ie, is `FOO=1 cmd` the same as `cmd --foo=1`?
  - no
- Do unknown arguments raise an error? Are they parsed? Are they treated as positional arguments?
  - no, they are parsed, not treated as positionals
- Does `--` signal the end of options?
  - yes
- Is `--` included as a positional?
  - no
- Is `program -- foo` the same as `program foo`?
  - yes, both store `{positionals:['foo']}`
- Does the API specify whether a `--` was present/relevant?
  - no
- Is `-bar` the same as `--bar`?
  - no, `-bar` is a short option or options, with expansion logic that follows the
    [Utility Syntax Guidelines in POSIX.1-2017](https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html). `-bar` expands to `-b`, `-a`, `-r` (see the sketch after this list).
- Is `---foo` the same as `--foo`?
  - no
  - the first is a long option named `'-foo'`
  - the second is a long option named `'foo'`
- Is `-` a positional? ie, `bash some-test.sh | tap -`
  - yes
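
A short sketch illustrating two of the answers above: short-option group expansion and `--` handling. The option names (`beep`, `all`, `raw`) are made up for the example:

```js
const { parseArgs } = require('@pkgjs/parseargs');

const options = {
  beep: { type: 'boolean', short: 'b' },
  all: { type: 'boolean', short: 'a' },
  raw: { type: 'boolean', short: 'r' },
};

// '-bar' expands to '-b', '-a', '-r', each resolved to its long name
const { values } = parseArgs({ args: ['-bar'], options });
// values = { beep: true, all: true, raw: true }

// '--' ends option parsing; it is not itself included in positionals
const { positionals } = parseArgs({ args: ['--', '--beep'], options, allowPositionals: true });
// positionals = ['--beep']
```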

## Links & Resources

* [Initial Tooling Issue](https://github.com/nodejs/tooling/issues/19)
* [Initial Proposal](https://github.com/nodejs/node/pull/35015)
* [parseArgs Proposal](https://github.com/nodejs/node/pull/42675)

[coverage-image]: https://img.shields.io/nycrc/pkgjs/parseargs
[coverage-url]: https://github.com/pkgjs/parseargs/blob/main/.nycrc
[pkgjs/parseargs]: https://github.com/pkgjs/parseargs
25
node_modules/@pkgjs/parseargs/examples/is-default-value.js
generated
vendored
Normal file
@ -0,0 +1,25 @@
'use strict';

// This example shows how to check whether a default value was used.

// 1. const { parseArgs } = require('node:util'); // from node
// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package
const { parseArgs } = require('..'); // in repo

const options = {
  file: { short: 'f', type: 'string', default: 'FOO' },
};

const { values, tokens } = parseArgs({ options, tokens: true });

const isFileDefault = !tokens.some((token) => token.kind === 'option' &&
                                              token.name === 'file'
);

console.log(values);
console.log(`Is the file option [${values.file}] the default value? ${isFileDefault}`);

// Try the following:
//   node is-default-value.js
//   node is-default-value.js -f FILE
//   node is-default-value.js --file FILE
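
// Expected behaviour (a sketch, not captured output; console formatting may differ):
//   node is-default-value.js          -> values roughly { file: 'FOO' },  isFileDefault is true
//   node is-default-value.js -f FILE  -> values roughly { file: 'FILE' }, isFileDefault is false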
35
node_modules/@pkgjs/parseargs/examples/limit-long-syntax.js
generated
vendored
Normal file
@ -0,0 +1,35 @@
'use strict';

// This is an example of using tokens to add a custom behaviour.
//
// Require the use of `=` for long options and values by blocking
// the use of space-separated values.
// So `--foo=bar` is allowed, but `--foo bar` is not.
//
// Note: this is not a common behaviour; most CLIs allow both forms.

// 1. const { parseArgs } = require('node:util'); // from node
// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package
const { parseArgs } = require('..'); // in repo

const options = {
  file: { short: 'f', type: 'string' },
  log: { type: 'string' },
};

const { values, tokens } = parseArgs({ options, tokens: true });

const badToken = tokens.find((token) => token.kind === 'option' &&
                                        token.value != null &&
                                        token.rawName.startsWith('--') &&
                                        !token.inlineValue
);
if (badToken) {
  throw new Error(`Option value for '${badToken.rawName}' must be inline, like '${badToken.rawName}=VALUE'`);
}

console.log(values);

// Try the following:
//   node limit-long-syntax.js -f FILE --log=LOG
//   node limit-long-syntax.js --file FILE
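
// Expected behaviour (a sketch, not captured output):
//   -f FILE --log=LOG  -> prints roughly { file: 'FILE', log: 'LOG' }
//   --file FILE        -> throws: Option value for '--file' must be inline, like '--file=VALUE'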
43
node_modules/@pkgjs/parseargs/examples/negate.js
generated
vendored
Normal file
@ -0,0 +1,43 @@
'use strict';

// This example is used in the documentation.

// How might I add my own support for --no-foo?

// 1. const { parseArgs } = require('node:util'); // from node
// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package
const { parseArgs } = require('..'); // in repo

const options = {
  'color': { type: 'boolean' },
  'no-color': { type: 'boolean' },
  'logfile': { type: 'string' },
  'no-logfile': { type: 'boolean' },
};
const { values, tokens } = parseArgs({ options, tokens: true });

// Reprocess the option tokens and overwrite the returned values.
tokens
  .filter((token) => token.kind === 'option')
  .forEach((token) => {
    if (token.name.startsWith('no-')) {
      // Store foo:false for --no-foo
      const positiveName = token.name.slice(3);
      values[positiveName] = false;
      delete values[token.name];
    } else {
      // Resave value so last one wins if both --foo and --no-foo.
      values[token.name] = token.value ?? true;
    }
  });

const color = values.color;
const logfile = values.logfile ?? 'default.log';

console.log({ logfile, color });

// Try the following:
//   node negate.js
//   node negate.js --no-logfile --no-color
//   node negate.js --logfile=test.log --color
//   node negate.js --no-logfile --logfile=test.log --color --no-color
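
// Expected behaviour (a sketch, not captured output):
//   (no arguments)                                       -> { logfile: 'default.log', color: undefined }
//   --no-logfile --no-color                              -> { logfile: false, color: false }
//   --no-logfile --logfile=test.log --color --no-color   -> { logfile: 'test.log', color: false }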
31
node_modules/@pkgjs/parseargs/examples/no-repeated-options.js
generated
vendored
Normal file
@ -0,0 +1,31 @@
'use strict';

// This is an example of using tokens to add a custom behaviour.
//
// Throw an error if an option is used more than once.

// 1. const { parseArgs } = require('node:util'); // from node
// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package
const { parseArgs } = require('..'); // in repo

const options = {
  ding: { type: 'boolean', short: 'd' },
  beep: { type: 'boolean', short: 'b' }
};
const { values, tokens } = parseArgs({ options, tokens: true });

const seenBefore = new Set();
tokens.forEach((token) => {
  if (token.kind !== 'option') return;
  if (seenBefore.has(token.name)) {
    throw new Error(`option '${token.name}' used multiple times`);
  }
  seenBefore.add(token.name);
});

console.log(values);

// Try the following:
//   node no-repeated-options --ding --beep
//   node no-repeated-options --beep -b
//   node no-repeated-options -ddd
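
// Expected behaviour (a sketch, not captured output):
//   --ding --beep -> prints roughly { ding: true, beep: true }
//   --beep -b     -> throws: option 'beep' used multiple times
//   -ddd          -> throws: option 'ding' used multiple times ('-ddd' expands to '-d -d -d')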
41
node_modules/@pkgjs/parseargs/examples/ordered-options.mjs
generated
vendored
Normal file
@ -0,0 +1,41 @@
// This is an example of using tokens to add a custom behaviour.
//
// This adds an option order check so that --some-unstable-option
// may only be used after --enable-experimental-options.
//
// Note: this is not a common behaviour; the order of different options
// does not usually matter.

import { parseArgs } from '../index.js';

function findTokenIndex(tokens, target) {
  return tokens.findIndex((token) => token.kind === 'option' &&
                                     token.name === target
  );
}

const experimentalName = 'enable-experimental-options';
const unstableName = 'some-unstable-option';

const options = {
  [experimentalName]: { type: 'boolean' },
  [unstableName]: { type: 'boolean' },
};

const { values, tokens } = parseArgs({ options, tokens: true });

const experimentalIndex = findTokenIndex(tokens, experimentalName);
const unstableIndex = findTokenIndex(tokens, unstableName);
if (unstableIndex !== -1 &&
    ((experimentalIndex === -1) || (unstableIndex < experimentalIndex))) {
  throw new Error(`'--${experimentalName}' must be specified before '--${unstableName}'`);
}

console.log(values);

/* eslint-disable max-len */
// Try the following:
//   node ordered-options.mjs
//   node ordered-options.mjs --some-unstable-option
//   node ordered-options.mjs --some-unstable-option --enable-experimental-options
//   node ordered-options.mjs --enable-experimental-options --some-unstable-option
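
// Expected behaviour (a sketch, not captured output):
//   --some-unstable-option                                -> throws: '--enable-experimental-options' must be specified before '--some-unstable-option'
//   --enable-experimental-options --some-unstable-option  -> prints both options as true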
26
node_modules/@pkgjs/parseargs/examples/simple-hard-coded.js
generated
vendored
Normal file
@ -0,0 +1,26 @@
'use strict';

// This example is used in the documentation.

// 1. const { parseArgs } = require('node:util'); // from node
// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package
const { parseArgs } = require('..'); // in repo

const args = ['-f', '--bar', 'b'];
const options = {
  foo: {
    type: 'boolean',
    short: 'f'
  },
  bar: {
    type: 'string'
  }
};
const {
  values,
  positionals
} = parseArgs({ args, options });
console.log(values, positionals);

// Try the following:
//   node simple-hard-coded.js
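
// Expected output (a sketch; values may display with a null prototype):
//   { foo: true, bar: 'b' } []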
Some files were not shown because too many files have changed in this diff.