new license file version [CI SKIP]
@@ -0,0 +1,7 @@
var HTTPS_RESOURCE_PATTERN = /^https:\/\//;

function isHttpsResource(uri) {
  return HTTPS_RESOURCE_PATTERN.test(uri);
}

module.exports = isHttpsResource;
@@ -0,0 +1,14 @@
/**
 * The base implementation of `_.propertyOf` without support for deep paths.
 *
 * @private
 * @param {Object} object The object to query.
 * @returns {Function} Returns the new accessor function.
 */
function basePropertyOf(object) {
  return function(key) {
    return object == null ? undefined : object[key];
  };
}

module.exports = basePropertyOf;
@@ -0,0 +1,2 @@
export declare function flatten<T>(items: T[][]): T[];
export declare function splitWhen<T>(items: T[], predicate: (item: T) => boolean): T[][];
@@ -0,0 +1 @@
module.exports={C:{"2":0,"3":0,"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0,"38":0,"39":0,"40":0,"41":0,"42":0,"43":0,"44":0,"45":0,"46":0,"47":0,"48":0,"49":0,"50":0,"51":0,"52":0.00255,"53":0,"54":0,"55":0,"56":0,"57":0,"58":0,"59":0,"60":0,"61":0,"62":0,"63":0,"64":0,"65":0,"66":0,"67":0,"68":0,"69":0,"70":0,"71":0,"72":0,"73":0,"74":0,"75":0,"76":0,"77":0,"78":0.00255,"79":0,"80":0,"81":0,"82":0,"83":0,"84":0,"85":0,"86":0,"87":0,"88":0,"89":0,"90":0,"91":0,"92":0,"93":0,"94":0,"95":0,"96":0,"97":0,"98":0,"99":0,"100":0,"101":0,"102":0.00255,"103":0,"104":0,"105":0,"106":0.00255,"107":0.00255,"108":0.00509,"109":0.08653,"110":0.06108,"111":0,"112":0,"3.5":0,"3.6":0},D:{"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0.00255,"23":0,"24":0,"25":0,"26":0.00255,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0.00764,"35":0,"36":0,"37":0,"38":0.00764,"39":0,"40":0,"41":0,"42":0,"43":0,"44":0,"45":0,"46":0,"47":0.01018,"48":0,"49":0.01273,"50":0,"51":0,"52":0,"53":0.00255,"54":0,"55":0,"56":0,"57":0,"58":0,"59":0,"60":0.01527,"61":0,"62":0,"63":0,"64":0,"65":0.00255,"66":0,"67":0,"68":0.00509,"69":0.00255,"70":0.00255,"71":0.00255,"72":0,"73":0.00255,"74":0,"75":0.00255,"76":0,"77":0,"78":0.00255,"79":0.03818,"80":0.00764,"81":0.00509,"83":0.01273,"84":0.01018,"85":0.01527,"86":0.01273,"87":0.01273,"88":0.00509,"89":0.00255,"90":0.00255,"91":0.00509,"92":0.02036,"93":0.00255,"94":0.00255,"95":0.00509,"96":0.00764,"97":0.00509,"98":0.00255,"99":0.00509,"100":0.01018,"101":0.00255,"102":0.00509,"103":0.01782,"104":0.00764,"105":0.01018,"106":0.01273,"107":0.02291,"108":0.0789,"109":2.66971,"110":1.39975,"111":0,"112":0,"113":0},F:{"9":0,"11":0,"12":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0.00509,"29":0,"30":0,"31":0,"32":0.00255,"33":0,"34":0,"35":0,"36":0.00255,"37":0,"38":0,"39":0,"40":0.00764,"41":0,"42":0,"43":0,"44":0,"45":0,"46":0.02036,"47":0,"48":0,"49":0,"50":0,"51":0,"52":0,"53":0,"54":0,"55":0,"56":0,"57":0,"58":0,"60":0,"62":0,"63":0,"64":0,"65":0,"66":0.00255,"67":0.05599,"68":0,"69":0,"70":0,"71":0,"72":0,"73":0,"74":0.00255,"75":0,"76":0,"77":0,"78":0,"79":0,"80":0,"81":0,"82":0,"83":0,"84":0,"85":0.00255,"86":0,"87":0,"88":0,"89":0,"90":0,"91":0,"92":0,"93":0.028,"94":0.25959,"95":0.15016,"9.5-9.6":0,"10.0-10.1":0,"10.5":0,"10.6":0,"11.1":0,"11.5":0,"11.6":0,"12.1":0},B:{"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0.00509,"79":0,"80":0,"81":0,"83":0,"84":0,"85":0,"86":0,"87":0,"88":0,"89":0,"90":0,"91":0,"92":0.00255,"93":0,"94":0,"95":0,"96":0,"97":0,"98":0,"99":0,"100":0,"101":0,"102":0,"103":0,"104":0,"105":0,"106":0.00255,"107":0.00509,"108":0.00764,"109":0.16034,"110":0.23923},E:{"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0.00509,"15":0.00255,_:"0","3.1":0,"3.2":0,"5.1":0.00255,"6.1":0,"7.1":0,"9.1":0,"10.1":0,"11.1":0,"12.1":0.00255,"13.1":0.00764,"14.1":0.01273,"15.1":0.00255,"15.2-15.3":0.00255,"15.4":0.00509,"15.5":0.00764,"15.6":0.03818,"16.0":0.00509,"16.1":0.02036,"16.2":0.03818,"16.3":0.03054,"16.4":0},G:{"8":0,"3.2":0,"4.0-4.1":0,"4.2-4.3":0,"5.0-5.1":0,"6.0-6.1":0,"7.0-7.1":0.01864,"8.1-8.4":0,"9.0-9.2":0.00373,"9.3":0.03541,"10.0-10.2":0.00559,"10.3":0.1081,"11.0-11.2":0.00932,"11.3-11.4":0.01118,"12.0-12.1":0.02
05,"12.2-12.5":1.43696,"13.0-13.1":0.01677,"13.2":0.00559,"13.3":0.04659,"13.4-13.7":0.16028,"14.0-14.4":0.32802,"14.5-14.8":0.75109,"15.0-15.1":0.11369,"15.2-15.3":0.17706,"15.4":0.2162,"15.5":0.42307,"15.6":1.17789,"16.0":1.42577,"16.1":3.38085,"16.2":3.5635,"16.3":2.33715,"16.4":0.00932},P:{"4":0.26302,"20":0.97115,"5.0-5.4":0.02023,"6.2-6.4":0,"7.2-7.4":0.16186,"8.2":0.01012,"9.2":0.03035,"10.1":0.01012,"11.1-11.2":0.0607,"12.0":0.02023,"13.0":0.08093,"14.0":0.03035,"15.0":0.02023,"16.0":0.07081,"17.0":0.19221,"18.0":0.11128,"19.0":1.80067},I:{"0":0,"3":0,"4":0,"2.1":0,"2.2":0,"2.3":0,"4.1":0.00132,"4.2-4.3":0.00659,"4.4":0,"4.4.3-4.4.4":0.03228},K:{_:"0 10 11 12 11.1 11.5 12.1"},A:{"6":0,"7":0,"8":0,"9":0,"10":0,"11":0.06872,"5.5":0},N:{"10":0,"11":0},S:{"2.5":0,_:"3.0-3.1"},J:{"7":0,"10":0},O:{"0":0.15656},H:{"0":0.86107},L:{"0":70.94851},R:{_:"0"},M:{"0":0.11183},Q:{"13.1":0}};
@@ -0,0 +1,3 @@
export type ResponsePrincipal = {
  id: number;
};
@@ -0,0 +1,34 @@
base64-js
=========

`base64-js` does basic base64 encoding/decoding in pure JS.

[](http://travis-ci.org/beatgammit/base64-js)

Many browsers already have base64 encoding/decoding functionality, but it is for text data, not all-purpose binary data.

Sometimes encoding/decoding binary data in the browser is useful, and that is what this module does.

## install

With [npm](https://npmjs.org) do:

`npm install base64-js` and `var base64js = require('base64-js')`

For use in web browsers do:

`<script src="base64js.min.js"></script>`

[Get supported base64-js with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-base64-js?utm_source=npm-base64-js&utm_medium=referral&utm_campaign=readme)

## methods

`base64js` has three exposed functions, `byteLength`, `toByteArray` and `fromByteArray`, each of which takes a single argument.

* `byteLength` - Takes a base64 string and returns the length of the byte array
* `toByteArray` - Takes a base64 string and returns a byte array
* `fromByteArray` - Takes a byte array and returns a base64 string

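For orientation, a short usage sketch of the three functions described above (the byte values shown are simply the ASCII bytes of `"hello"`):

```js
var base64js = require('base64-js')

var b64 = 'aGVsbG8='                   // base64 for "hello"
base64js.byteLength(b64)               // => 5
var bytes = base64js.toByteArray(b64)  // => Uint8Array [104, 101, 108, 108, 111]
base64js.fromByteArray(bytes)          // => 'aGVsbG8='
```
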
## license

MIT
@@ -0,0 +1,6 @@
export type UpdatePermission = {
  id: number;
  principal: number;
  target: string;
  action: string;
};
@@ -0,0 +1,13 @@
"use strict";

var value = require("./valid-value")
  , objPropertyIsEnumerable = Object.prototype.propertyIsEnumerable;

// Returns the name of the first own enumerable property of obj, or null if there is none.
module.exports = function (obj) {
  var i;
  value(obj);
  for (i in obj) {
    if (objPropertyIsEnumerable.call(obj, i)) return i;
  }
  return null;
};
@@ -0,0 +1,50 @@
name: ci

on: [push, pull_request]

jobs:
  legacy:
    runs-on: ubuntu-latest

    strategy:
      matrix:
        node-version: ['0.10', '0.12', 4.x, 6.x, 8.x]

    steps:
      - uses: actions/checkout@v2

      - name: Use Node.js
        uses: actions/setup-node@v1
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install
        run: |
          npm install --production && npm install tape

      - name: Run tests
        run: |
          npm run legacy

  test:
    runs-on: ubuntu-latest

    strategy:
      matrix:
        node-version: [10.x, 12.x, 13.x, 14.x, 15.x, 16.x]

    steps:
      - uses: actions/checkout@v2

      - name: Use Node.js
        uses: actions/setup-node@v1
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install
        run: |
          npm install

      - name: Run tests
        run: |
          npm run test
@@ -0,0 +1,20 @@
'use strict';

function posix(path) {
  return path.charAt(0) === '/';
}

function win32(path) {
  // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56
  var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/;
  var result = splitDeviceRe.exec(path);
  var device = result[1] || '';
  var isUnc = Boolean(device && device.charAt(1) !== ':');

  // UNC paths are always absolute
  return Boolean(result[2] || isUnc);
}

module.exports = process.platform === 'win32' ? win32 : posix;
module.exports.posix = posix;
module.exports.win32 = win32;
@@ -0,0 +1,36 @@
import { __extends } from "tslib";
import { AsyncAction } from './AsyncAction';
import { animationFrameProvider } from './animationFrameProvider';
var AnimationFrameAction = (function (_super) {
    __extends(AnimationFrameAction, _super);
    function AnimationFrameAction(scheduler, work) {
        var _this = _super.call(this, scheduler, work) || this;
        _this.scheduler = scheduler;
        _this.work = work;
        return _this;
    }
    AnimationFrameAction.prototype.requestAsyncId = function (scheduler, id, delay) {
        if (delay === void 0) { delay = 0; }
        if (delay !== null && delay > 0) {
            return _super.prototype.requestAsyncId.call(this, scheduler, id, delay);
        }
        scheduler.actions.push(this);
        return scheduler._scheduled || (scheduler._scheduled = animationFrameProvider.requestAnimationFrame(function () { return scheduler.flush(undefined); }));
    };
    AnimationFrameAction.prototype.recycleAsyncId = function (scheduler, id, delay) {
        var _a;
        if (delay === void 0) { delay = 0; }
        if (delay != null ? delay > 0 : this.delay > 0) {
            return _super.prototype.recycleAsyncId.call(this, scheduler, id, delay);
        }
        var actions = scheduler.actions;
        if (id != null && ((_a = actions[actions.length - 1]) === null || _a === void 0 ? void 0 : _a.id) !== id) {
            animationFrameProvider.cancelAnimationFrame(id);
            scheduler._scheduled = undefined;
        }
        return undefined;
    };
    return AnimationFrameAction;
}(AsyncAction));
export { AnimationFrameAction };
//# sourceMappingURL=AnimationFrameAction.js.map
File diff suppressed because it is too large
@@ -0,0 +1,57 @@
import { Transform } from "stream";

type JsonArray = boolean[] | number[] | string[] | JsonMap[] | Date[]
type AnyJson = boolean | number | string | JsonMap | Date | JsonArray | JsonArray[]

interface JsonMap {
  [key: string]: AnyJson;
}

interface ParseOptions {
  /**
   * The amount of text to parse per pass through the event loop. Defaults to 40kb (`40000`).
   */
  blocksize: number
}

interface FuncParse {
  /**
   * Synchronously parse a TOML string and return an object.
   */
  (toml: string): JsonMap

  /**
   * Asynchronously parse a TOML string and return a promise of the resulting object.
   */
  async (toml: string, options?: ParseOptions): Promise<JsonMap>

  /**
   * Given a readable stream, parse it as it feeds us data. Return a promise of the resulting object.
   */
  stream (readable: NodeJS.ReadableStream): Promise<JsonMap>
  stream (): Transform
}

interface FuncStringify {
  /**
   * Serialize an object as TOML.
   *
   * If an object `TOML.stringify` is serializing has a `toJSON` method
   * then it will call it to transform the object before serializing it.
   * This matches the behavior of JSON.stringify.
   *
   * The one exception to this is that `toJSON` is not called for `Date` objects
   * because JSON represents dates as strings and TOML can represent them natively.
   *
   * `moment` objects are treated the same as native `Date` objects, in this respect.
   */
  (obj: JsonMap): string

  /**
   * Serialize a value as TOML would. This is a fragment and not a complete valid TOML document.
   */
  value (any: AnyJson): string
}

export const parse: FuncParse
export const stringify: FuncStringify
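The declarations above describe a synchronous `parse`, asynchronous `parse.async`/`parse.stream` variants, and a `stringify` with a `value` helper. A minimal usage sketch, assuming these typings belong to the `@iarna/toml` package (the package name is not stated in this diff):

```js
// Assumption: the module exposing parse/stringify above is @iarna/toml.
const TOML = require('@iarna/toml');

const doc = TOML.parse('title = "example"\n\n[server]\nport = 8080\n');
console.log(doc.server.port); // 8080

// Async variant resolves to the same object shape.
TOML.parse.async('n = 1').then((obj) => console.log(obj.n)); // 1

// Serialize a whole document, or a single value fragment.
console.log(TOML.stringify({ title: 'example' })); // title = "example"
console.log(TOML.stringify.value([1, 2, 3]));      // e.g. [ 1, 2, 3 ]
```
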
@@ -0,0 +1,168 @@
[![Build status][build-image]][build-url]
[![Tests coverage][cov-image]][cov-url]
[![npm version][npm-image]][npm-url]

# type

## Runtime validation and processing of JavaScript types

- Respects language nature and acknowledges its quirks
- Allows coercion in restricted forms (rejects clearly invalid input, normalizes permissible type deviations)
- No transpilation implied, written to work in all ECMAScript 3+ engines

## Use case

Validate arguments input in public API endpoints.

_For validation of more sophisticated input structures (such as deeply nested configuration objects) it's recommended to consider more powerful schema-based utilities (such as [AJV](https://ajv.js.org/) or [@hapi/joi](https://hapi.dev/family/joi/))_

### Example usage

Bulletproof input arguments normalization and validation:

```javascript
const ensureString = require('type/string/ensure')
    , ensureDate = require('type/date/ensure')
    , ensureNaturalNumber = require('type/natural-number/ensure')
    , isObject = require('type/object/is');

module.exports = (path, options = { min: 0 }) => {
  path = ensureString(path, { errorMessage: "%v is not a path" });
  if (!isObject(options)) options = {};
  const min = ensureNaturalNumber(options.min, { default: 0 })
      , max = ensureNaturalNumber(options.max, { isOptional: true })
      , startTime = ensureDate(options.startTime, { isOptional: true });

  // ...logic
};
```

### Installation

```bash
npm install type
```

## Utilities

Aside from the general [`ensure`](docs/ensure.md) validation util, the following kinds of utilities for recognized JavaScript types are provided:

##### `*/coerce`

Restricted coercion into a primitive type. Returns the coerced value, or `null` if the value is not coercible per the rules.

##### `*/is`

Object type/kind confirmation, returns either `true` or `false`.

##### `*/ensure`

Value validation. Returns the input value (in primitive cases possibly coerced) or, if the value doesn't meet the constraints, throws a `TypeError`.

Each `*/ensure` utility accepts the following options (optionally passed as the second argument); a short sketch of how they compose follows this list:

- `isOptional` - Makes `null` or `undefined` accepted as a valid value. In such a case, instead of a `TypeError` being thrown, `null` is returned.
- `default` - A value to be returned if `null` or `undefined` is passed as an input value.
- `errorMessage` - Custom error message. The following placeholders can be used:
  - `%v` - To be replaced with a short string representation of the invalid value
  - `%n` - To be replaced with a meaningful name (to be passed with the `name` option) of the validated value. Not effective if the `name` option is not present
- `errorCode` - Eventual error code to be exposed on the `.code` error property
- `name` - Meaningful name for the validated value, to be used in the error message, assuming it contains the `%n` placeholder
- `Error` - Alternative error constructor to be used (defaults to `TypeError`)

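For illustration, a minimal sketch of how these options combine, using hypothetical inputs (exact coercion rules and error-message wording are defined by each individual util):

```js
const ensureNaturalNumber = require("type/natural-number/ensure");

ensureNaturalNumber(7);                               // 7
ensureNaturalNumber(null, { default: 0 });            // 0 (default applies to null/undefined)
ensureNaturalNumber(undefined, { isOptional: true }); // null instead of a TypeError

// Custom message with %v/%n placeholders (the `name` option feeds %n):
ensureNaturalNumber(-1, {
  name: "limit",
  errorMessage: "%n must be a natural number, received %v"
}); // throws TypeError with a message roughly: "limit must be a natural number, received -1"
```
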
### Index

#### General utils:

- [`ensure`](docs/ensure.md)

#### Type specific utils:

- **Value**
  - [`value/is`](docs/value.md#valueis)
  - [`value/ensure`](docs/value.md#valueensure)
- **Object**
  - [`object/is`](docs/object.md#objectis)
  - [`object/ensure`](docs/object.md#objectensure)
- **Plain Object**
  - [`plain-object/is`](docs/plain-object.md#plain-objectis)
  - [`plain-object/ensure`](docs/plain-object.md#plain-objectensure)
- **String**
  - [`string/coerce`](docs/string.md#stringcoerce)
  - [`string/ensure`](docs/string.md#stringensure)
- **Number**
  - [`number/coerce`](docs/number.md#numbercoerce)
  - [`number/ensure`](docs/number.md#numberensure)
- **Finite Number**
  - [`finite/coerce`](docs/finite.md#finitecoerce)
  - [`finite/ensure`](docs/finite.md#finiteensure)
- **Integer Number**
  - [`integer/coerce`](docs/integer.md#integercoerce)
  - [`integer/ensure`](docs/integer.md#integerensure)
- **Safe Integer Number**
  - [`safe-integer/coerce`](docs/safe-integer.md#safe-integercoerce)
  - [`safe-integer/ensure`](docs/safe-integer.md#safe-integerensure)
- **Natural Number**
  - [`natural-number/coerce`](docs/natural-number.md#natural-numbercoerce)
  - [`natural-number/ensure`](docs/natural-number.md#natural-numberensure)
- **Array Length**
  - [`array-length/coerce`](docs/array-length.md#array-lengthcoerce)
  - [`array-length/ensure`](docs/array-length.md#array-lengthensure)
- **Time Value**
  - [`time-value/coerce`](docs/time-value.md#time-valuecoerce)
  - [`time-value/ensure`](docs/time-value.md#time-valueensure)
- **BigInt**
  - [`big-int/coerce`](docs/big-int.md#big-intcoerce)
  - [`big-int/ensure`](docs/big-int.md#big-intensure)
- **Array Like**
  - [`array-like/is`](docs/array-like.md#array-likeis)
  - [`array-like/ensure`](docs/array-like.md#array-likeensure)
- **Array**
  - [`array/is`](docs/array.md#arrayis)
  - [`array/ensure`](docs/array.md#arrayensure)
- **Iterable**
  - [`iterable/is`](docs/iterable.md#iterableis)
  - [`iterable/ensure`](docs/iterable.md#iterableensure)
- **Set**
  - [`set/is`](docs/set.md#setis)
  - [`set/ensure`](docs/set.md#setensure)
- **Map**
  - [`map/is`](docs/map.md#mapis)
  - [`map/ensure`](docs/map.md#mapensure)
- **Date**
  - [`date/is`](docs/date.md#dateis)
  - [`date/ensure`](docs/date.md#dateensure)
- **Function**
  - [`function/is`](docs/function.md#functionis)
  - [`function/ensure`](docs/function.md#functionensure)
- **Constructor**
  - [`constructor/is`](docs/constructor.md#plain-functionis)
  - [`constructor/ensure`](docs/constructor.md#plain-functionensure)
- **Plain Function**
  - [`plain-function/is`](docs/plain-function.md#plain-functionis)
  - [`plain-function/ensure`](docs/plain-function.md#plain-functionensure)
- **Reg Exp**
  - [`reg-exp/is`](docs/reg-exp.md#reg-expis)
  - [`reg-exp/ensure`](docs/reg-exp.md#reg-expensure)
- **Thenable**
  - [`thenable/is`](docs/thenable.md#thenableis)
  - [`thenable/ensure`](docs/thenable.md#thenableensure)
- **Promise**
  - [`promise/is`](docs/promise.md#promiseis)
  - [`promise/ensure`](docs/promise.md#promiseensure)
- **Error**
  - [`error/is`](docs/error.md#erroris)
  - [`error/ensure`](docs/error.md#errorensure)
- **Prototype**
  - [`prototype/is`](docs/prototype.md#prototypeis)

### Tests

    $ npm test

[build-image]: https://github.com/medikoo/type/workflows/Integrate/badge.svg
[build-url]: https://github.com/medikoo/type/actions?query=workflow%3AIntegrate
[cov-image]: https://img.shields.io/codecov/c/github/medikoo/type.svg
[cov-url]: https://codecov.io/gh/medikoo/type
[npm-image]: https://img.shields.io/npm/v/type.svg
[npm-url]: https://www.npmjs.com/package/type
@@ -0,0 +1,30 @@
var arrayFilter = require('./_arrayFilter'),
    stubArray = require('./stubArray');

/** Used for built-in method references. */
var objectProto = Object.prototype;

/** Built-in value references. */
var propertyIsEnumerable = objectProto.propertyIsEnumerable;

/* Built-in method references for those with the same name as other `lodash` methods. */
var nativeGetSymbols = Object.getOwnPropertySymbols;

/**
 * Creates an array of the own enumerable symbols of `object`.
 *
 * @private
 * @param {Object} object The object to query.
 * @returns {Array} Returns the array of symbols.
 */
var getSymbols = !nativeGetSymbols ? stubArray : function(object) {
  if (object == null) {
    return [];
  }
  object = Object(object);
  return arrayFilter(nativeGetSymbols(object), function(symbol) {
    return propertyIsEnumerable.call(object, symbol);
  });
};

module.exports = getSymbols;
@@ -0,0 +1,77 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    indentRecursive: ()=>indentRecursive,
    formatNodes: ()=>formatNodes,
    readFileWithRetries: ()=>readFileWithRetries,
    drainStdin: ()=>drainStdin,
    outputFile: ()=>outputFile
});
const _fs = /*#__PURE__*/ _interopRequireDefault(require("fs"));
const _path = /*#__PURE__*/ _interopRequireDefault(require("path"));
function _interopRequireDefault(obj) {
    return obj && obj.__esModule ? obj : {
        default: obj
    };
}
function indentRecursive(node, indent = 0) {
    node.each && node.each((child, i)=>{
        if (!child.raws.before || !child.raws.before.trim() || child.raws.before.includes("\n")) {
            child.raws.before = `\n${node.type !== "rule" && i > 0 ? "\n" : ""}${" ".repeat(indent)}`;
        }
        child.raws.after = `\n${" ".repeat(indent)}`;
        indentRecursive(child, indent + 1);
    });
}
function formatNodes(root) {
    indentRecursive(root);
    if (root.first) {
        root.first.raws.before = "";
    }
}
async function readFileWithRetries(path, tries = 5) {
    for(let n = 0; n <= tries; n++){
        try {
            return await _fs.default.promises.readFile(path, "utf8");
        } catch (err) {
            if (n !== tries) {
                if (err.code === "ENOENT" || err.code === "EBUSY") {
                    await new Promise((resolve)=>setTimeout(resolve, 10));
                    continue;
                }
            }
            throw err;
        }
    }
}
function drainStdin() {
    return new Promise((resolve, reject)=>{
        let result = "";
        process.stdin.on("data", (chunk)=>{
            result += chunk;
        });
        process.stdin.on("end", ()=>resolve(result));
        process.stdin.on("error", (err)=>reject(err));
    });
}
async function outputFile(file, newContents) {
    try {
        let currentContents = await _fs.default.promises.readFile(file, "utf8");
        if (currentContents === newContents) {
            return; // Skip writing the file
        }
    } catch {}
    // Write the file
    await _fs.default.promises.mkdir(_path.default.dirname(file), {
        recursive: true
    });
    await _fs.default.promises.writeFile(file, newContents, "utf8");
}
@@ -0,0 +1,39 @@
# has-bigints <sup>[![Version Badge][npm-version-svg]][package-url]</sup>

[![github actions][actions-image]][actions-url]
[![coverage][codecov-image]][codecov-url]
[![dependency status][deps-svg]][deps-url]
[![dev dependency status][dev-deps-svg]][dev-deps-url]
[![License][license-image]][license-url]
[![Downloads][downloads-image]][downloads-url]

[![npm badge][npm-badge-png]][package-url]

Determine if the JS environment has BigInt support.

## Example

```js
var hasBigInts = require('has-bigints');

hasBigInts() === true; // if the environment has native BigInt support. Not polyfillable, not forgeable.
```

## Tests
Simply clone the repo, `npm install`, and run `npm test`

[package-url]: https://npmjs.org/package/has-bigints
[npm-version-svg]: https://versionbadg.es/inspect-js/has-bigints.svg
[deps-svg]: https://david-dm.org/inspect-js/has-bigints.svg
[deps-url]: https://david-dm.org/inspect-js/has-bigints
[dev-deps-svg]: https://david-dm.org/inspect-js/has-bigints/dev-status.svg
[dev-deps-url]: https://david-dm.org/inspect-js/has-bigints#info=devDependencies
[npm-badge-png]: https://nodei.co/npm/has-bigints.png?downloads=true&stars=true
[license-image]: https://img.shields.io/npm/l/has-bigints.svg
[license-url]: LICENSE
[downloads-image]: https://img.shields.io/npm/dm/has-bigints.svg
[downloads-url]: https://npm-stat.com/charts.html?package=has-bigints
[codecov-image]: https://codecov.io/gh/inspect-js/has-bigints/branch/main/graphs/badge.svg
[codecov-url]: https://app.codecov.io/gh/inspect-js/has-bigints/
[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/inspect-js/has-bigints
[actions-url]: https://github.com/inspect-js/has-bigints/actions
@@ -0,0 +1,32 @@
import { Observable } from '../Observable';
import { Unsubscribable, ObservableInput, ObservedValueOf } from '../types';
/**
 * Creates an Observable that uses a resource which will be disposed at the same time as the Observable.
 *
 * <span class="informal">Use it when you catch yourself cleaning up after an Observable.</span>
 *
 * `using` is a factory operator, which accepts two functions. First function returns a disposable resource.
 * It can be an arbitrary object that implements `unsubscribe` method. Second function will be injected with
 * that object and should return an Observable. That Observable can use resource object during its execution.
 * Both functions passed to `using` will be called every time someone subscribes - neither an Observable nor
 * resource object will be shared in any way between subscriptions.
 *
 * When Observable returned by `using` is subscribed, Observable returned from the second function will be subscribed
 * as well. All its notifications (nexted values, completion and error events) will be emitted unchanged by the output
 * Observable. If however someone unsubscribes from the Observable or source Observable completes or errors by itself,
 * the `unsubscribe` method on resource object will be called. This can be used to do any necessary clean up, which
 * otherwise would have to be handled by hand. Note that complete or error notifications are not emitted when someone
 * cancels subscription to an Observable via `unsubscribe`, so `using` can be used as a hook, allowing you to make
 * sure that all resources which need to exist during an Observable execution will be disposed at appropriate time.
 *
 * @see {@link defer}
 *
 * @param {function(): ISubscription} resourceFactory A function which creates any resource object
 * that implements `unsubscribe` method.
 * @param {function(resource: ISubscription): Observable<T>} observableFactory A function which
 * creates an Observable, that can use injected resource object.
 * @return {Observable<T>} An Observable that behaves the same as Observable returned by `observableFactory`, but
 * which - when completed, errored or unsubscribed - will also call `unsubscribe` on created resource object.
 */
export declare function using<T extends ObservableInput<any>>(resourceFactory: () => Unsubscribable | void, observableFactory: (resource: Unsubscribable | void) => T | void): Observable<ObservedValueOf<T>>;
//# sourceMappingURL=using.d.ts.map
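The JSDoc above describes `using` entirely in prose; a minimal usage sketch (RxJS 7-style imports assumed, and the logging resource is hypothetical):

```js
import { using, interval } from 'rxjs';
import { take } from 'rxjs/operators';

// Any object with an `unsubscribe` method qualifies as the disposable resource.
const resourceFactory = () => ({ unsubscribe: () => console.log('resource disposed') });

const source$ = using(resourceFactory, () => interval(100).pipe(take(3)));

source$.subscribe({
  next: (v) => console.log(v),          // 0, 1, 2
  complete: () => console.log('done'),  // 'resource disposed' is logged on teardown
});
```
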
@@ -0,0 +1 @@
{"version":3,"file":"applyMixins.js","sourceRoot":"","sources":["../../../../src/internal/util/applyMixins.ts"],"names":[],"mappings":"AAAA,MAAM,UAAU,WAAW,CAAC,WAAgB,EAAE,SAAgB;IAC5D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,GAAG,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;QACpD,IAAM,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;QAC9B,IAAM,YAAY,GAAG,MAAM,CAAC,mBAAmB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;QACpE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,IAAI,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,EAAE;YACzD,IAAM,MAAI,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;YAC7B,WAAW,CAAC,SAAS,CAAC,MAAI,CAAC,GAAG,QAAQ,CAAC,SAAS,CAAC,MAAI,CAAC,CAAC;SACxD;KACF;AACH,CAAC"}
@@ -0,0 +1,5 @@
import { QueueAction } from './QueueAction';
import { QueueScheduler } from './QueueScheduler';
export const queueScheduler = new QueueScheduler(QueueAction);
export const queue = queueScheduler;
//# sourceMappingURL=queue.js.map
@@ -0,0 +1,31 @@
# is-installed-globally

> Check if your package was installed globally

Can be useful if your CLI needs different behavior when installed globally and locally.

## Install

```
$ npm install is-installed-globally
```

## Usage

```js
const isInstalledGlobally = require('is-installed-globally');

// With `npm install your-package`
console.log(isInstalledGlobally);
//=> false

// With `npm install --global your-package`
console.log(isInstalledGlobally);
//=> true
```

## Related

- [import-global](https://github.com/sindresorhus/import-global) - Import a globally installed module
- [resolve-global](https://github.com/sindresorhus/resolve-global) - Resolve the path of a globally installed module
- [global-dirs](https://github.com/sindresorhus/global-dirs) - Get the directory of globally installed packages and binaries
@@ -0,0 +1,6 @@
export type ResponseScan = {
  id: number;
  runner: string;
  valid: boolean;
  distance: number;
};
@@ -0,0 +1,70 @@
# Change Log

## 4.1.0
> Released 07/24/2019
* Adds int64 support for node v12+
* Drops support for node v4

## 4.0
> Released 10/21/2017
* Major breaking changes arriving in v4.

### New Features
* Ability to read data from a specific offset. ex: readInt8(5)
* Ability to write over data when an offset is given (see breaking changes) ex: writeInt8(5, 0);
* Ability to set internal read and write offsets.

### Breaking Changes

* Old constructor patterns have been completely removed. It's now required to use the SmartBuffer.fromXXX() factory constructors. Read more on the v4 docs.
* rewind(), skip(), moveTo() have been removed.
* Internal private properties are now prefixed with underscores (_).
* **All** writeXXX() methods that are given an offset will now **overwrite data** instead of insert.
* insertXXX() methods have been added for when you want to insert data at a specific offset (this replaces the old behavior of writeXXX() when an offset was provided); a short sketch of the difference follows this list.

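To make the overwrite-vs-insert distinction concrete, a minimal sketch (the byte values are arbitrary illustrations, not from the changelog):

```js
const { SmartBuffer } = require('smart-buffer');

const buf = SmartBuffer.fromBuffer(Buffer.from([1, 2, 3]));

buf.writeUInt8(9, 1);   // v4: overwrites the byte at offset 1 -> 01 09 03
buf.insertUInt8(7, 1);  // inserts a new byte at offset 1      -> 01 07 09 03

console.log(buf.toBuffer()); // <Buffer 01 07 09 03>
```
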
### Other Changes
* Standardized error messaging
* Standardized offset/length bounds and sanity checking
* General overall cleanup of code.

## 3.0.3
> Released 02/19/2017
* Adds missing type definitions for some internal functions.

## 3.0.2
> Released 02/17/2017

### Bug Fixes
* Fixes a bug where using readString with a length of zero resulted in reading the remaining data instead of returning an empty string. (Fixed by Seldszar)

## 3.0.1
> Released 02/15/2017

### Bug Fixes
* Fixes a bug leftover from the TypeScript refactor where .readIntXXX() resulted in .readUIntXXX() being called by mistake.

## 3.0
> Released 02/12/2017

### Bug Fixes
* readUIntXXXX() methods will now throw an exception if they attempt to read beyond the bounds of the valid buffer data available.
* **Note** This is technically a breaking change, so version is bumped to 3.x.

## 2.0
> Released 01/30/2017

### New Features:

* Entire package re-written in TypeScript (2.1)
* Backwards compatibility is preserved for now
* New factory methods for creating SmartBuffer instances
  * SmartBuffer.fromSize()
  * SmartBuffer.fromBuffer()
  * SmartBuffer.fromOptions()
* New SmartBufferOptions constructor options
* Added additional tests

### Bug Fixes:
* Fixes a bug where reading null terminated strings may result in an exception.
@@ -0,0 +1 @@
{"name":"escodegen","version":"1.14.3","files":{"LICENSE.BSD":{"checkedAt":1678883672075,"integrity":"sha512-oe+64gaD6vjCdgd+uRSfG7lRu6cQLgwEzZg0KSf8uMZbJPuqxXECTMvD19mezDhbKjhrhHMmZWtiuNmBCwK/gQ==","mode":420,"size":1315},"bin/escodegen.js":{"checkedAt":1678883672075,"integrity":"sha512-SqbJFn466MleBGG/brzvA7y123vV2sfXaaaye/mhrHg11xJlHYQE8epwotKiCo1S/ro5nRKQLAhUYQS9jwEkzg==","mode":493,"size":2710},"escodegen.js":{"checkedAt":1678883672122,"integrity":"sha512-NQDtTSkl4d4ph2g7NDsh1epPHXq4Pfn+otv554+4lOsfRmYeKCJtsGVfisu9sdaJYQexdjbeXrGP+CfW/2SLpQ==","mode":420,"size":95718},"bin/esgenerate.js":{"checkedAt":1678883672122,"integrity":"sha512-8magmUCCQWnen4CHLf1xaC8JU6xTsBWfyI9ef7Blem59wF3S+GQidCl23rAmV/BQsPRkfG9bujnSzae7NVrnng==","mode":493,"size":2415},"package.json":{"checkedAt":1678883672122,"integrity":"sha512-ip+Uq+TdlP1YduM8NGprk4hKY2OEeaW9g9Zza7SmRZEZl9jkytS7ibCN6HkWObKpkRUg5eRX4IehPAYCTu1JzA==","mode":420,"size":1695},"README.md":{"checkedAt":1678883672122,"integrity":"sha512-j9WQcIUpUCFEBXnyecTr5D+oXJF8PfIG77bKup3H8DyLnjCLto5fD5ksR2+yLkrV+M+uwj3biv0YyhSHtpLNdA==","mode":420,"size":3287}}}
@@ -0,0 +1,9 @@
MIT License

Copyright (c) 2014-2022 Patrik Simek and contributors

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,259 @@
'use strict'
module.exports = writeFile
module.exports.sync = writeFileSync
module.exports._getTmpname = getTmpname // for testing
module.exports._cleanupOnExit = cleanupOnExit

const fs = require('fs')
const MurmurHash3 = require('imurmurhash')
const onExit = require('signal-exit')
const path = require('path')
const isTypedArray = require('is-typedarray')
const typedArrayToBuffer = require('typedarray-to-buffer')
const { promisify } = require('util')
const activeFiles = {}

// if we run inside of a worker_thread, `process.pid` is not unique
/* istanbul ignore next */
const threadId = (function getId () {
  try {
    const workerThreads = require('worker_threads')

    /// if we are in main thread, this is set to `0`
    return workerThreads.threadId
  } catch (e) {
    // worker_threads are not available, fallback to 0
    return 0
  }
})()

let invocations = 0
function getTmpname (filename) {
  return filename + '.' +
    MurmurHash3(__filename)
      .hash(String(process.pid))
      .hash(String(threadId))
      .hash(String(++invocations))
      .result()
}

function cleanupOnExit (tmpfile) {
  return () => {
    try {
      fs.unlinkSync(typeof tmpfile === 'function' ? tmpfile() : tmpfile)
    } catch (_) {}
  }
}

function serializeActiveFile (absoluteName) {
  return new Promise(resolve => {
    // make a queue if it doesn't already exist
    if (!activeFiles[absoluteName]) activeFiles[absoluteName] = []

    activeFiles[absoluteName].push(resolve) // add this job to the queue
    if (activeFiles[absoluteName].length === 1) resolve() // kick off the first one
  })
}

// https://github.com/isaacs/node-graceful-fs/blob/master/polyfills.js#L315-L342
function isChownErrOk (err) {
  if (err.code === 'ENOSYS') {
    return true
  }

  const nonroot = !process.getuid || process.getuid() !== 0
  if (nonroot) {
    if (err.code === 'EINVAL' || err.code === 'EPERM') {
      return true
    }
  }

  return false
}

async function writeFileAsync (filename, data, options = {}) {
  if (typeof options === 'string') {
    options = { encoding: options }
  }

  let fd
  let tmpfile
  /* istanbul ignore next -- The closure only gets called when onExit triggers */
  const removeOnExitHandler = onExit(cleanupOnExit(() => tmpfile))
  const absoluteName = path.resolve(filename)

  try {
    await serializeActiveFile(absoluteName)
    const truename = await promisify(fs.realpath)(filename).catch(() => filename)
    tmpfile = getTmpname(truename)

    if (!options.mode || !options.chown) {
      // Either mode or chown is not explicitly set
      // Default behavior is to copy it from original file
      const stats = await promisify(fs.stat)(truename).catch(() => {})
      if (stats) {
        if (options.mode == null) {
          options.mode = stats.mode
        }

        if (options.chown == null && process.getuid) {
          options.chown = { uid: stats.uid, gid: stats.gid }
        }
      }
    }

    fd = await promisify(fs.open)(tmpfile, 'w', options.mode)
    if (options.tmpfileCreated) {
      await options.tmpfileCreated(tmpfile)
    }
    if (isTypedArray(data)) {
      data = typedArrayToBuffer(data)
    }
    if (Buffer.isBuffer(data)) {
      await promisify(fs.write)(fd, data, 0, data.length, 0)
    } else if (data != null) {
      await promisify(fs.write)(fd, String(data), 0, String(options.encoding || 'utf8'))
    }

    if (options.fsync !== false) {
      await promisify(fs.fsync)(fd)
    }

    await promisify(fs.close)(fd)
    fd = null

    if (options.chown) {
      await promisify(fs.chown)(tmpfile, options.chown.uid, options.chown.gid).catch(err => {
        if (!isChownErrOk(err)) {
          throw err
        }
      })
    }

    if (options.mode) {
      await promisify(fs.chmod)(tmpfile, options.mode).catch(err => {
        if (!isChownErrOk(err)) {
          throw err
        }
      })
    }

    await promisify(fs.rename)(tmpfile, truename)
  } finally {
    if (fd) {
      await promisify(fs.close)(fd).catch(
        /* istanbul ignore next */
        () => {}
      )
    }
    removeOnExitHandler()
    await promisify(fs.unlink)(tmpfile).catch(() => {})
    activeFiles[absoluteName].shift() // remove the element added by serializeSameFile
    if (activeFiles[absoluteName].length > 0) {
      activeFiles[absoluteName][0]() // start next job if one is pending
    } else delete activeFiles[absoluteName]
  }
}

function writeFile (filename, data, options, callback) {
  if (options instanceof Function) {
    callback = options
    options = {}
  }

  const promise = writeFileAsync(filename, data, options)
  if (callback) {
    promise.then(callback, callback)
  }

  return promise
}

function writeFileSync (filename, data, options) {
  if (typeof options === 'string') options = { encoding: options }
  else if (!options) options = {}
  try {
    filename = fs.realpathSync(filename)
  } catch (ex) {
    // it's ok, it'll happen on a not yet existing file
  }
  const tmpfile = getTmpname(filename)

  if (!options.mode || !options.chown) {
    // Either mode or chown is not explicitly set
    // Default behavior is to copy it from original file
    try {
      const stats = fs.statSync(filename)
      options = Object.assign({}, options)
      if (!options.mode) {
        options.mode = stats.mode
      }
      if (!options.chown && process.getuid) {
        options.chown = { uid: stats.uid, gid: stats.gid }
      }
    } catch (ex) {
      // ignore stat errors
    }
  }

  let fd
  const cleanup = cleanupOnExit(tmpfile)
  const removeOnExitHandler = onExit(cleanup)

  let threw = true
  try {
    fd = fs.openSync(tmpfile, 'w', options.mode || 0o666)
    if (options.tmpfileCreated) {
      options.tmpfileCreated(tmpfile)
    }
    if (isTypedArray(data)) {
      data = typedArrayToBuffer(data)
    }
    if (Buffer.isBuffer(data)) {
      fs.writeSync(fd, data, 0, data.length, 0)
    } else if (data != null) {
      fs.writeSync(fd, String(data), 0, String(options.encoding || 'utf8'))
    }
    if (options.fsync !== false) {
      fs.fsyncSync(fd)
    }

    fs.closeSync(fd)
    fd = null

    if (options.chown) {
      try {
        fs.chownSync(tmpfile, options.chown.uid, options.chown.gid)
      } catch (err) {
        if (!isChownErrOk(err)) {
          throw err
        }
      }
    }

    if (options.mode) {
      try {
        fs.chmodSync(tmpfile, options.mode)
      } catch (err) {
        if (!isChownErrOk(err)) {
          throw err
        }
      }
    }

    fs.renameSync(tmpfile, filename)
    threw = false
  } finally {
    if (fd) {
      try {
        fs.closeSync(fd)
      } catch (ex) {
        // ignore close errors at this stage, error may have closed fd already.
      }
    }
    removeOnExitHandler()
    if (threw) {
      cleanup()
    }
  }
}
@@ -0,0 +1,34 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = isByteLength;

var _assertString = _interopRequireDefault(require("./util/assertString"));

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }

/* eslint-disable prefer-rest-params */
function isByteLength(str, options) {
  (0, _assertString.default)(str);
  var min;
  var max;

  if (_typeof(options) === 'object') {
    min = options.min || 0;
    max = options.max;
  } else {
    // backwards compatibility: isByteLength(str, min [, max])
    min = arguments[1];
    max = arguments[2];
  }

  var len = encodeURI(str).split(/%..|./).length - 1;
  return len >= min && (typeof max === 'undefined' || len <= max);
}

module.exports = exports.default;
module.exports.default = exports.default;
File diff suppressed because it is too large
@@ -0,0 +1 @@
{"version":3,"file":"NotificationFactories.js","sourceRoot":"","sources":["../../../src/internal/NotificationFactories.ts"],"names":[],"mappings":"AAOA,MAAM,CAAC,MAAM,qBAAqB,GAAG,CAAC,GAAG,EAAE,CAAC,kBAAkB,CAAC,GAAG,EAAE,SAAS,EAAE,SAAS,CAAyB,CAAC,EAAE,CAAC;AAOrH,MAAM,UAAU,iBAAiB,CAAC,KAAU;IAC1C,OAAO,kBAAkB,CAAC,GAAG,EAAE,SAAS,EAAE,KAAK,CAAQ,CAAC;AAC1D,CAAC;AAOD,MAAM,UAAU,gBAAgB,CAAI,KAAQ;IAC1C,OAAO,kBAAkB,CAAC,GAAG,EAAE,KAAK,EAAE,SAAS,CAAwB,CAAC;AAC1E,CAAC;AAQD,MAAM,UAAU,kBAAkB,CAAC,IAAqB,EAAE,KAAU,EAAE,KAAU;IAC9E,OAAO;QACL,IAAI;QACJ,KAAK;QACL,KAAK;KACN,CAAC;AACJ,CAAC"}
@@ -0,0 +1,138 @@
import { Input, Hash, HashXOF } from './utils.js';
import { Keccak, ShakeOpts } from './sha3.js';
export declare type cShakeOpts = ShakeOpts & {
    personalization?: Input;
    NISTfn?: Input;
};
export declare const cshake128: {
    (msg: Input, opts?: cShakeOpts | undefined): Uint8Array;
    outputLen: number;
    blockLen: number;
    create(opts: cShakeOpts): Hash<Keccak>;
};
export declare const cshake256: {
    (msg: Input, opts?: cShakeOpts | undefined): Uint8Array;
    outputLen: number;
    blockLen: number;
    create(opts: cShakeOpts): Hash<Keccak>;
};
declare class KMAC extends Keccak implements HashXOF<KMAC> {
    constructor(blockLen: number, outputLen: number, enableXOF: boolean, key: Input, opts?: cShakeOpts);
    protected finish(): void;
    _cloneInto(to?: KMAC): KMAC;
    clone(): KMAC;
}
export declare const kmac128: {
    (key: Input, message: Input, opts?: cShakeOpts): Uint8Array;
    create(key: Input, opts?: cShakeOpts): KMAC;
};
export declare const kmac256: {
    (key: Input, message: Input, opts?: cShakeOpts): Uint8Array;
    create(key: Input, opts?: cShakeOpts): KMAC;
};
export declare const kmac128xof: {
    (key: Input, message: Input, opts?: cShakeOpts): Uint8Array;
    create(key: Input, opts?: cShakeOpts): KMAC;
};
export declare const kmac256xof: {
    (key: Input, message: Input, opts?: cShakeOpts): Uint8Array;
    create(key: Input, opts?: cShakeOpts): KMAC;
};
declare class TupleHash extends Keccak implements HashXOF<TupleHash> {
    constructor(blockLen: number, outputLen: number, enableXOF: boolean, opts?: cShakeOpts);
    protected finish(): void;
    _cloneInto(to?: TupleHash): TupleHash;
    clone(): TupleHash;
}
export declare const tuplehash128: {
    (messages: Input[], opts?: cShakeOpts): Uint8Array;
    create(opts?: cShakeOpts): TupleHash;
};
export declare const tuplehash256: {
    (messages: Input[], opts?: cShakeOpts): Uint8Array;
    create(opts?: cShakeOpts): TupleHash;
};
export declare const tuplehash128xof: {
    (messages: Input[], opts?: cShakeOpts): Uint8Array;
    create(opts?: cShakeOpts): TupleHash;
};
export declare const tuplehash256xof: {
    (messages: Input[], opts?: cShakeOpts): Uint8Array;
    create(opts?: cShakeOpts): TupleHash;
};
declare type ParallelOpts = cShakeOpts & {
    blockLen?: number;
};
declare class ParallelHash extends Keccak implements HashXOF<ParallelHash> {
    protected leafCons: () => Hash<Keccak>;
    private leafHash?;
    private chunkPos;
    private chunksDone;
    private chunkLen;
    constructor(blockLen: number, outputLen: number, leafCons: () => Hash<Keccak>, enableXOF: boolean, opts?: ParallelOpts);
    protected finish(): void;
    _cloneInto(to?: ParallelHash): ParallelHash;
    destroy(): void;
    clone(): ParallelHash;
}
export declare const parallelhash128: {
    (message: Input, opts?: ParallelOpts): Uint8Array;
    create(opts?: ParallelOpts): ParallelHash;
};
export declare const parallelhash256: {
    (message: Input, opts?: ParallelOpts): Uint8Array;
    create(opts?: ParallelOpts): ParallelHash;
};
export declare const parallelhash128xof: {
    (message: Input, opts?: ParallelOpts): Uint8Array;
    create(opts?: ParallelOpts): ParallelHash;
};
export declare const parallelhash256xof: {
    (message: Input, opts?: ParallelOpts): Uint8Array;
    create(opts?: ParallelOpts): ParallelHash;
};
export declare type KangarooOpts = {
    dkLen?: number;
    personalization?: Input;
};
declare class KangarooTwelve extends Keccak implements HashXOF<KangarooTwelve> {
    protected leafLen: number;
    readonly chunkLen = 8192;
    private leafHash?;
    private personalization;
    private chunkPos;
    private chunksDone;
    constructor(blockLen: number, leafLen: number, outputLen: number, rounds: number, opts: KangarooOpts);
    update(data: Input): this;
    protected finish(): void;
    destroy(): void;
    _cloneInto(to?: KangarooTwelve): KangarooTwelve;
    clone(): KangarooTwelve;
}
export declare const k12: {
    (msg: Input, opts?: KangarooOpts | undefined): Uint8Array;
    outputLen: number;
    blockLen: number;
    create(opts: KangarooOpts): Hash<KangarooTwelve>;
};
export declare const m14: {
    (msg: Input, opts?: KangarooOpts | undefined): Uint8Array;
    outputLen: number;
    blockLen: number;
    create(opts: KangarooOpts): Hash<KangarooTwelve>;
};
declare class KeccakPRG extends Keccak {
    protected rate: number;
    constructor(capacity: number);
    keccak(): void;
    update(data: Input): this;
    feed(data: Input): this;
    protected finish(): void;
    digestInto(out: Uint8Array): Uint8Array;
    fetch(bytes: number): Uint8Array;
    forget(): void;
    _cloneInto(to?: KeccakPRG): KeccakPRG;
    clone(): KeccakPRG;
}
export declare const keccakprg: (capacity?: number) => KeccakPRG;
export {};
@@ -0,0 +1,125 @@
# is-plain-object [](https://www.npmjs.com/package/is-plain-object) [](https://npmjs.org/package/is-plain-object) [](https://npmjs.org/package/is-plain-object) [](https://travis-ci.org/jonschlinkert/is-plain-object)

> Returns true if an object was created by the `Object` constructor, or Object.create(null).

Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.

## Install

Install with [npm](https://www.npmjs.com/):

```sh
$ npm install --save is-plain-object
```

Use [isobject](https://github.com/jonschlinkert/isobject) if you only want to check if the value is an object and not an array or null.

## Usage

with es modules
```js
import { isPlainObject } from 'is-plain-object';
```

or with commonjs
```js
const { isPlainObject } = require('is-plain-object');
```

**true** when created by the `Object` constructor, or Object.create(null).

```js
isPlainObject(Object.create({}));
//=> true
isPlainObject(Object.create(Object.prototype));
//=> true
isPlainObject({foo: 'bar'});
//=> true
isPlainObject({});
//=> true
isPlainObject(Object.create(null));
//=> true
```

**false** when not created by the `Object` constructor.

```js
isPlainObject(1);
//=> false
isPlainObject(['foo', 'bar']);
//=> false
isPlainObject([]);
//=> false
isPlainObject(new Foo);
//=> false
isPlainObject(null);
//=> false
```

## About

<details>
<summary><strong>Contributing</strong></summary>

Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).

</details>

<details>
<summary><strong>Running Tests</strong></summary>

Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:

```sh
$ npm install && npm test
```

</details>

<details>
<summary><strong>Building docs</strong></summary>

_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_

To generate the readme, run the following command:

```sh
$ npm install -g verbose/verb#dev verb-generate-readme && verb
```

</details>

### Related projects

You might also be interested in these projects:

* [is-number](https://www.npmjs.com/package/is-number): Returns true if a number or string value is a finite number. Useful for regex… [more](https://github.com/jonschlinkert/is-number) | [homepage](https://github.com/jonschlinkert/is-number "Returns true if a number or string value is a finite number. Useful for regex matches, parsing, user input, etc.")
* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.")
* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.")

### Contributors

| **Commits** | **Contributor** |
| --- | --- |
| 19 | [jonschlinkert](https://github.com/jonschlinkert) |
| 6 | [TrySound](https://github.com/TrySound) |
| 6 | [stevenvachon](https://github.com/stevenvachon) |
| 3 | [onokumus](https://github.com/onokumus) |
| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) |

### Author

**Jon Schlinkert**

* [GitHub Profile](https://github.com/jonschlinkert)
* [Twitter Profile](https://twitter.com/jonschlinkert)
* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)

### License

Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
Released under the [MIT License](LICENSE).

***

_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 28, 2019._
@@ -0,0 +1 @@
{"version":3,"file":"queue.js","sourceRoot":"","sources":["../../../../src/internal/scheduler/queue.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,cAAc,EAAE,MAAM,kBAAkB,CAAC;AAiElD,MAAM,CAAC,IAAM,cAAc,GAAG,IAAI,cAAc,CAAC,WAAW,CAAC,CAAC;AAK9D,MAAM,CAAC,IAAM,KAAK,GAAG,cAAc,CAAC"}
@@ -0,0 +1,46 @@
# String.prototype.trimEnd <sup>[![Version Badge][npm-version-svg]][package-url]</sup>

[![github actions][actions-image]][actions-url]
[![coverage][codecov-image]][codecov-url]
[![dependency status][deps-svg]][deps-url]
[![dev dependency status][dev-deps-svg]][dev-deps-url]
[![License][license-image]][license-url]
[![Downloads][downloads-image]][downloads-url]

[![npm badge][npm-badge-png]][package-url]

An ES2019-spec-compliant `String.prototype.trimEnd` shim. Invoke its "shim" method to shim `String.prototype.trimEnd` if it is unavailable.

This package implements the [es-shim API](https://github.com/es-shims/api) interface. It works in an ES3-supported environment and complies with the [spec](https://www.ecma-international.org/ecma-262/6.0/#sec-object.assign). In an ES6 environment, it will also work properly with `Symbol`s.

Most common usage:
```js
var trimEnd = require('string.prototype.trimend');

assert(trimEnd(' \t\na \t\n') === ' \t\na');

if (!String.prototype.trimEnd) {
	trimEnd.shim();
}

assert(trimEnd(' \t\na \t\n ') === ' \t\na \t\n '.trimEnd());
```

## Tests
Simply clone the repo, `npm install`, and run `npm test`

[package-url]: https://npmjs.com/package/string.prototype.trimend
[npm-version-svg]: https://vb.teelaun.ch/es-shims/String.prototype.trimEnd.svg
[deps-svg]: https://david-dm.org/es-shims/String.prototype.trimEnd.svg
[deps-url]: https://david-dm.org/es-shims/String.prototype.trimEnd
[dev-deps-svg]: https://david-dm.org/es-shims/String.prototype.trimEnd/dev-status.svg
[dev-deps-url]: https://david-dm.org/es-shims/String.prototype.trimEnd#info=devDependencies
[npm-badge-png]: https://nodei.co/npm/string.prototype.trimend.png?downloads=true&stars=true
[license-image]: https://img.shields.io/npm/l/string.prototype.trimend.svg
[license-url]: LICENSE
[downloads-image]: https://img.shields.io/npm/dm/string.prototype.trimend.svg
[downloads-url]: https://npm-stat.com/charts.html?package=string.prototype.trimend
[codecov-image]: https://codecov.io/gh/es-shims/String.prototype.trimEnd/branch/main/graphs/badge.svg
[codecov-url]: https://app.codecov.io/gh/es-shims/String.prototype.trimEnd/
[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/es-shims/String.prototype.trimEnd
[actions-url]: https://github.com/es-shims/String.prototype.trimEnd/actions
@@ -0,0 +1,9 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.pairs = void 0;
|
||||
var from_1 = require("./from");
|
||||
function pairs(obj, scheduler) {
|
||||
return from_1.from(Object.entries(obj), scheduler);
|
||||
}
|
||||
exports.pairs = pairs;
|
||||
//# sourceMappingURL=pairs.js.map
|
||||
@@ -0,0 +1,5 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = require("./is-implemented")()
|
||||
? String.prototype[require("es6-symbol").iterator]
|
||||
: require("./shim");
|
||||
@@ -0,0 +1,9 @@
|
||||
import AbstractBlock from './shared/AbstractBlock';
import Component from '../Component';
import TemplateScope from './shared/TemplateScope';
import { TemplateNode } from '../../interfaces';
import Node from './shared/Node';
export default class PendingBlock extends AbstractBlock {
    type: 'PendingBlock';
    constructor(component: Component, parent: Node, scope: TemplateScope, info: TemplateNode);
}
@@ -0,0 +1,5 @@
|
||||
"use strict";
|
||||
/* istanbul ignore file */
|
||||
/* tslint:disable */
|
||||
/* eslint-disable */
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
@@ -0,0 +1,15 @@
|
||||
import { concat } from '../observable/concat';
import { take } from './take';
import { ignoreElements } from './ignoreElements';
import { mapTo } from './mapTo';
import { mergeMap } from './mergeMap';
import { innerFrom } from '../observable/innerFrom';
export function delayWhen(delayDurationSelector, subscriptionDelay) {
    if (subscriptionDelay) {
        // With a subscription delay, wait for its first emission before
        // subscribing to the (delayWhen-wrapped) source.
        return function (source) {
            return concat(subscriptionDelay.pipe(take(1), ignoreElements()), source.pipe(delayWhen(delayDurationSelector)));
        };
    }
    // Otherwise delay each value until the Observable returned by
    // delayDurationSelector(value, index) emits once, then re-emit the value.
    return mergeMap(function (value, index) { return innerFrom(delayDurationSelector(value, index)).pipe(take(1), mapTo(value)); });
}
//# sourceMappingURL=delayWhen.js.map
@@ -0,0 +1,70 @@
|
||||
import { createNewLookupObject } from './create-new-lookup-object';
|
||||
import * as logger from '../logger';
|
||||
|
||||
const loggedProperties = Object.create(null);
|
||||
|
||||
export function createProtoAccessControl(runtimeOptions) {
|
||||
let defaultMethodWhiteList = Object.create(null);
|
||||
defaultMethodWhiteList['constructor'] = false;
|
||||
defaultMethodWhiteList['__defineGetter__'] = false;
|
||||
defaultMethodWhiteList['__defineSetter__'] = false;
|
||||
defaultMethodWhiteList['__lookupGetter__'] = false;
|
||||
|
||||
let defaultPropertyWhiteList = Object.create(null);
|
||||
// eslint-disable-next-line no-proto
|
||||
defaultPropertyWhiteList['__proto__'] = false;
|
||||
|
||||
return {
|
||||
properties: {
|
||||
whitelist: createNewLookupObject(
|
||||
defaultPropertyWhiteList,
|
||||
runtimeOptions.allowedProtoProperties
|
||||
),
|
||||
defaultValue: runtimeOptions.allowProtoPropertiesByDefault
|
||||
},
|
||||
methods: {
|
||||
whitelist: createNewLookupObject(
|
||||
defaultMethodWhiteList,
|
||||
runtimeOptions.allowedProtoMethods
|
||||
),
|
||||
defaultValue: runtimeOptions.allowProtoMethodsByDefault
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export function resultIsAllowed(result, protoAccessControl, propertyName) {
|
||||
if (typeof result === 'function') {
|
||||
return checkWhiteList(protoAccessControl.methods, propertyName);
|
||||
} else {
|
||||
return checkWhiteList(protoAccessControl.properties, propertyName);
|
||||
}
|
||||
}
|
||||
|
||||
function checkWhiteList(protoAccessControlForType, propertyName) {
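  // An explicit whitelist entry (true or false) always wins; otherwise fall back to
  // the runtime default for this kind of access; if neither is set, log once and deny.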
|
||||
if (protoAccessControlForType.whitelist[propertyName] !== undefined) {
|
||||
return protoAccessControlForType.whitelist[propertyName] === true;
|
||||
}
|
||||
if (protoAccessControlForType.defaultValue !== undefined) {
|
||||
return protoAccessControlForType.defaultValue;
|
||||
}
|
||||
logUnexpecedPropertyAccessOnce(propertyName);
|
||||
return false;
|
||||
}
|
||||
|
||||
function logUnexpecedPropertyAccessOnce(propertyName) {
|
||||
if (loggedProperties[propertyName] !== true) {
|
||||
loggedProperties[propertyName] = true;
|
||||
logger.log(
|
||||
'error',
|
||||
`Handlebars: Access has been denied to resolve the property "${propertyName}" because it is not an "own property" of its parent.\n` +
|
||||
`You can add a runtime option to disable the check or this warning:\n` +
|
||||
`See https://handlebarsjs.com/api-reference/runtime-options.html#options-to-control-prototype-access for details`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export function resetLoggedProperties() {
|
||||
Object.keys(loggedProperties).forEach(propertyName => {
|
||||
delete loggedProperties[propertyName];
|
||||
});
|
||||
}
|
||||
@@ -0,0 +1,59 @@
|
||||
# Filing issues

Please [check the existing issues](https://github.com/mozilla/localForage/issues) to make sure your issue hasn't already been filed.

If you have a bug to report, please file it. You are also encouraged to create an example (or [edit an existing one](http://codepen.io/thgreasi/pen/ojYKeE)) to showcase your issue.

If you'd like to see a feature implemented, you can file an issue, but know that pull requests for small things like adding a line in a config file will get more attention than an issue asking someone else to do it.

See below for [issues that have been discussed and will be rejected](#features-localforage-will-reject).

# Contributing to localForage

First off: thanks! Open source software (and thus all software) exists because of people like you. <3

If you'd like to contribute to localForage, it's as simple as opening a pull request on GitHub. After that, someone will review your work and either ask you to fix any errors or merge the code into master. Here are a few tips:

* **all drivers must have the same public API**: refer to [the API tests](https://github.com/mozilla/localForage/blob/master/test/test.api.coffee) for how we ensure this, but simply put: all drivers should share the _exact same_ outward-facing API
* **do your work on a feature branch**: this keeps things clean and easy
* **keep your branch rebased on top of master**: rebasing instead of merging master in keeps the commit history clean and avoids merge commits inside feature branches
* **write tests**: if you're adding new features, _please_ write tests; likewise, if you're fixing a bug that wasn't previously caught by a test, please add one
* **run `grunt build` before you commit**: this will build out the files in the `dist/` folder and ensure your tests pass

Please commit the changes at the top-level folder along with your changes in the `src/` folder; **do not make these changes separate commits**. These are the built versions of `localforage.js` and `localforage.min.js`, which are used by bower.

If you have any questions, need some help, or anything else, don't feel shy! The team behind this library is often available on IRC ([irc.mozilla.org](https://wiki.mozilla.org/IRC) on the `#apps` channel).

## Coding Style

All code can be checked for style by running the unit tests (`npm test`). This will make sure your code conforms to our style guide. (You can read the rules in `.jshint` and `.jscsrc`.)

## Features localForage will reject

### node.js support

localForage is a browser library with a specific focus on client-side, offline storage. It is not a general-purpose storage library and is not meant to allow for the same API on the client and the server. Implementing the localForage API wouldn't be hard (it's just localStorage with callbacks and ES6 promises), but it's a job for another library.
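
Concretely, here is a minimal sketch of that idea. It is not part of localForage; it uses an in-memory `Map` as a hypothetical stand-in backend, just to show what "localStorage with callbacks and ES6 promises" amounts to:

```js
// Hypothetical stand-in, not localForage: a Map-backed store exposing a
// localStorage-like API that returns promises and also accepts Node-style callbacks.
var store = new Map();

function getItem(key, callback) {
  var promise = Promise.resolve(store.has(key) ? store.get(key) : null);
  if (callback) promise.then(function (value) { callback(null, value); }, callback);
  return promise;
}

function setItem(key, value, callback) {
  var promise = Promise.resolve().then(function () {
    store.set(key, value);
    return value;
  });
  if (callback) promise.then(function (v) { callback(null, v); }, callback);
  return promise;
}

// Usage: setItem('greeting', 'hi').then(function () { return getItem('greeting'); });
```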

### Legacy browser support

Basically, this means anything before IE 8. There are hacky ways to support storage with cookies, IE userData, and the like, but anything worse than localStorage isn't worth investing in.
@@ -0,0 +1,3 @@
|
||||
'use strict';

module.exports = require('./async').eachLimit;
@@ -0,0 +1 @@
|
||||
module.exports={A:{A:{"2":"J D E F A B CC"},B:{"1":"L G M N O P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H","2":"C K"},C:{"1":"NB OB PB QB RB SB TB UB VB WB XB YB uB ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB","2":"0 1 2 3 4 5 6 7 8 9 DC tB I v J D E F A B C K L G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB EC FC"},D:{"1":"UB VB WB XB YB uB ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB GC","2":"0 1 2 3 4 5 6 7 8 9 I v J D E F A B C K L G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB"},E:{"1":"B C K L G 0B qB rB 1B MC NC 2B 3B 4B 5B sB 6B 7B 8B 9B OC","2":"I v J D E F A HC zB IC JC KC LC"},F:{"1":"HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e","2":"0 1 2 3 4 5 6 7 8 9 F B C G M N O w g x y z AB BB CB DB EB FB GB PC QC RC SC qB AC TC rB"},G:{"1":"cC dC eC fC gC hC iC jC kC lC mC nC 2B 3B 4B 5B sB 6B 7B 8B 9B","2":"E zB UC BC VC WC XC YC ZC aC bC"},H:{"2":"oC"},I:{"1":"f","2":"tB I pC qC rC sC BC tC uC"},J:{"2":"D","16":"A"},K:{"1":"h","2":"A B C qB AC rB"},L:{"1":"H"},M:{"1":"H"},N:{"2":"A B"},O:{"1":"vC"},P:{"1":"g xC yC zC 0C 0B 1C 2C 3C 4C 5C sB 6C 7C 8C","2":"I wC"},Q:{"1":"1B"},R:{"1":"9C"},S:{"1":"AD BD"}},B:6,C:"Object.entries"};
|
||||
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"CanonicalizeTimeZoneName.d.ts","sourceRoot":"","sources":["../../../../../packages/ecma402-abstract/CanonicalizeTimeZoneName.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,wBAAgB,wBAAwB,CACtC,EAAE,EAAE,MAAM,EACV,EACE,MAAM,EACN,cAAc,GACf,EAAE;IACD,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;IAC/B,cAAc,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CACvC,UAgBF"}
|
||||
@@ -0,0 +1,697 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
// A bit simpler than readable streams.
|
||||
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
|
||||
// the drain event emission and buffering.
|
||||
'use strict';
|
||||
|
||||
module.exports = Writable;
|
||||
/* <replacement> */
|
||||
|
||||
function WriteReq(chunk, encoding, cb) {
|
||||
this.chunk = chunk;
|
||||
this.encoding = encoding;
|
||||
this.callback = cb;
|
||||
this.next = null;
|
||||
} // It seems a linked list but it is not
|
||||
// there will be only 2 of these for each stream
|
||||
|
||||
|
||||
function CorkedRequest(state) {
|
||||
var _this = this;
|
||||
|
||||
this.next = null;
|
||||
this.entry = null;
|
||||
|
||||
this.finish = function () {
|
||||
onCorkedFinish(_this, state);
|
||||
};
|
||||
}
|
||||
/* </replacement> */
|
||||
|
||||
/*<replacement>*/
|
||||
|
||||
|
||||
var Duplex;
|
||||
/*</replacement>*/
|
||||
|
||||
Writable.WritableState = WritableState;
|
||||
/*<replacement>*/
|
||||
|
||||
var internalUtil = {
|
||||
deprecate: require('util-deprecate')
|
||||
};
|
||||
/*</replacement>*/
|
||||
|
||||
/*<replacement>*/
|
||||
|
||||
var Stream = require('./internal/streams/stream');
|
||||
/*</replacement>*/
|
||||
|
||||
|
||||
var Buffer = require('buffer').Buffer;
|
||||
|
||||
var OurUint8Array = global.Uint8Array || function () {};
|
||||
|
||||
function _uint8ArrayToBuffer(chunk) {
|
||||
return Buffer.from(chunk);
|
||||
}
|
||||
|
||||
function _isUint8Array(obj) {
|
||||
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
|
||||
}
|
||||
|
||||
var destroyImpl = require('./internal/streams/destroy');
|
||||
|
||||
var _require = require('./internal/streams/state'),
|
||||
getHighWaterMark = _require.getHighWaterMark;
|
||||
|
||||
var _require$codes = require('../errors').codes,
|
||||
ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE,
|
||||
ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED,
|
||||
ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK,
|
||||
ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE,
|
||||
ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED,
|
||||
ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES,
|
||||
ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END,
|
||||
ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING;
|
||||
|
||||
var errorOrDestroy = destroyImpl.errorOrDestroy;
|
||||
|
||||
require('inherits')(Writable, Stream);
|
||||
|
||||
function nop() {}
|
||||
|
||||
function WritableState(options, stream, isDuplex) {
|
||||
Duplex = Duplex || require('./_stream_duplex');
|
||||
options = options || {}; // Duplex streams are both readable and writable, but share
|
||||
// the same options object.
|
||||
// However, some cases require setting options to different
|
||||
// values for the readable and the writable sides of the duplex stream,
|
||||
// e.g. options.readableObjectMode vs. options.writableObjectMode, etc.
|
||||
|
||||
if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream
|
||||
// contains buffers or objects.
|
||||
|
||||
this.objectMode = !!options.objectMode;
|
||||
if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false
|
||||
// Note: 0 is a valid value, means that we always return false if
|
||||
// the entire buffer is not flushed immediately on write()
|
||||
|
||||
this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called
|
||||
|
||||
this.finalCalled = false; // drain event flag.
|
||||
|
||||
this.needDrain = false; // at the start of calling end()
|
||||
|
||||
this.ending = false; // when end() has been called, and returned
|
||||
|
||||
this.ended = false; // when 'finish' is emitted
|
||||
|
||||
this.finished = false; // has it been destroyed
|
||||
|
||||
this.destroyed = false; // should we decode strings into buffers before passing to _write?
|
||||
// this is here so that some node-core streams can optimize string
|
||||
// handling at a lower level.
|
||||
|
||||
var noDecode = options.decodeStrings === false;
|
||||
this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string
|
||||
// encoding is 'binary' so we have to make this configurable.
|
||||
// Everything else in the universe uses 'utf8', though.
|
||||
|
||||
this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement
|
||||
// of how much we're waiting to get pushed to some underlying
|
||||
// socket or file.
|
||||
|
||||
this.length = 0; // a flag to see when we're in the middle of a write.
|
||||
|
||||
this.writing = false; // when true all writes will be buffered until .uncork() call
|
||||
|
||||
this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately,
|
||||
// or on a later tick. We set this to true at first, because any
|
||||
// actions that shouldn't happen until "later" should generally also
|
||||
// not happen before the first write call.
|
||||
|
||||
this.sync = true; // a flag to know if we're processing previously buffered items, which
|
||||
// may call the _write() callback in the same tick, so that we don't
|
||||
// end up in an overlapped onwrite situation.
|
||||
|
||||
this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb)
|
||||
|
||||
this.onwrite = function (er) {
|
||||
onwrite(stream, er);
|
||||
}; // the callback that the user supplies to write(chunk,encoding,cb)
|
||||
|
||||
|
||||
this.writecb = null; // the amount that is being written when _write is called.
|
||||
|
||||
this.writelen = 0;
|
||||
this.bufferedRequest = null;
|
||||
this.lastBufferedRequest = null; // number of pending user-supplied write callbacks
|
||||
// this must be 0 before 'finish' can be emitted
|
||||
|
||||
this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs
|
||||
// This is relevant for synchronous Transform streams
|
||||
|
||||
this.prefinished = false; // True if the error was already emitted and should not be thrown again
|
||||
|
||||
this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true.
|
||||
|
||||
this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end')
|
||||
|
||||
this.autoDestroy = !!options.autoDestroy; // count buffered requests
|
||||
|
||||
this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always
|
||||
// one allocated and free to use, and we maintain at most two
|
||||
|
||||
this.corkedRequestsFree = new CorkedRequest(this);
|
||||
}
|
||||
|
||||
WritableState.prototype.getBuffer = function getBuffer() {
|
||||
var current = this.bufferedRequest;
|
||||
var out = [];
|
||||
|
||||
while (current) {
|
||||
out.push(current);
|
||||
current = current.next;
|
||||
}
|
||||
|
||||
return out;
|
||||
};
|
||||
|
||||
(function () {
|
||||
try {
|
||||
Object.defineProperty(WritableState.prototype, 'buffer', {
|
||||
get: internalUtil.deprecate(function writableStateBufferGetter() {
|
||||
return this.getBuffer();
|
||||
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
|
||||
});
|
||||
} catch (_) {}
|
||||
})(); // Test _writableState for inheritance to account for Duplex streams,
|
||||
// whose prototype chain only points to Readable.
|
||||
|
||||
|
||||
var realHasInstance;
|
||||
|
||||
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
|
||||
realHasInstance = Function.prototype[Symbol.hasInstance];
|
||||
Object.defineProperty(Writable, Symbol.hasInstance, {
|
||||
value: function value(object) {
|
||||
if (realHasInstance.call(this, object)) return true;
|
||||
if (this !== Writable) return false;
|
||||
return object && object._writableState instanceof WritableState;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
realHasInstance = function realHasInstance(object) {
|
||||
return object instanceof this;
|
||||
};
|
||||
}
|
||||
|
||||
function Writable(options) {
|
||||
Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too.
|
||||
// `realHasInstance` is necessary because using plain `instanceof`
|
||||
// would return false, as no `_writableState` property is attached.
|
||||
// Trying to use the custom `instanceof` for Writable here will also break the
|
||||
// Node.js LazyTransform implementation, which has a non-trivial getter for
|
||||
// `_writableState` that would lead to infinite recursion.
|
||||
// Checking for a Stream.Duplex instance is faster here instead of inside
|
||||
// the WritableState constructor, at least with V8 6.5
|
||||
|
||||
var isDuplex = this instanceof Duplex;
|
||||
if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options);
|
||||
this._writableState = new WritableState(options, this, isDuplex); // legacy.
|
||||
|
||||
this.writable = true;
|
||||
|
||||
if (options) {
|
||||
if (typeof options.write === 'function') this._write = options.write;
|
||||
if (typeof options.writev === 'function') this._writev = options.writev;
|
||||
if (typeof options.destroy === 'function') this._destroy = options.destroy;
|
||||
if (typeof options.final === 'function') this._final = options.final;
|
||||
}
|
||||
|
||||
Stream.call(this);
|
||||
} // Otherwise people can pipe Writable streams, which is just wrong.
|
||||
|
||||
|
||||
Writable.prototype.pipe = function () {
|
||||
errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE());
|
||||
};
|
||||
|
||||
function writeAfterEnd(stream, cb) {
|
||||
var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb
|
||||
|
||||
errorOrDestroy(stream, er);
|
||||
process.nextTick(cb, er);
|
||||
} // Checks that a user-supplied chunk is valid, especially for the particular
|
||||
// mode the stream is in. Currently this means that `null` is never accepted
|
||||
// and undefined/non-string values are only allowed in object mode.
|
||||
|
||||
|
||||
function validChunk(stream, state, chunk, cb) {
|
||||
var er;
|
||||
|
||||
if (chunk === null) {
|
||||
er = new ERR_STREAM_NULL_VALUES();
|
||||
} else if (typeof chunk !== 'string' && !state.objectMode) {
|
||||
er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
|
||||
}
|
||||
|
||||
if (er) {
|
||||
errorOrDestroy(stream, er);
|
||||
process.nextTick(cb, er);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
Writable.prototype.write = function (chunk, encoding, cb) {
|
||||
var state = this._writableState;
|
||||
var ret = false;
|
||||
|
||||
var isBuf = !state.objectMode && _isUint8Array(chunk);
|
||||
|
||||
if (isBuf && !Buffer.isBuffer(chunk)) {
|
||||
chunk = _uint8ArrayToBuffer(chunk);
|
||||
}
|
||||
|
||||
if (typeof encoding === 'function') {
|
||||
cb = encoding;
|
||||
encoding = null;
|
||||
}
|
||||
|
||||
if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
|
||||
if (typeof cb !== 'function') cb = nop;
|
||||
if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
|
||||
state.pendingcb++;
|
||||
ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
|
||||
}
|
||||
return ret;
|
||||
};
|
||||
|
||||
Writable.prototype.cork = function () {
|
||||
this._writableState.corked++;
|
||||
};
|
||||
|
||||
Writable.prototype.uncork = function () {
|
||||
var state = this._writableState;
|
||||
|
||||
if (state.corked) {
|
||||
state.corked--;
|
||||
if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
|
||||
}
|
||||
};
|
||||
|
||||
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
|
||||
// node::ParseEncoding() requires lower case.
|
||||
if (typeof encoding === 'string') encoding = encoding.toLowerCase();
|
||||
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding);
|
||||
this._writableState.defaultEncoding = encoding;
|
||||
return this;
|
||||
};
|
||||
|
||||
Object.defineProperty(Writable.prototype, 'writableBuffer', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function get() {
|
||||
return this._writableState && this._writableState.getBuffer();
|
||||
}
|
||||
});
|
||||
|
||||
function decodeChunk(state, chunk, encoding) {
|
||||
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
|
||||
chunk = Buffer.from(chunk, encoding);
|
||||
}
|
||||
|
||||
return chunk;
|
||||
}
|
||||
|
||||
Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function get() {
|
||||
return this._writableState.highWaterMark;
|
||||
}
|
||||
}); // if we're already writing something, then just put this
|
||||
// in the queue, and wait our turn. Otherwise, call _write
|
||||
// If we return false, then we need a drain event, so set that flag.
|
||||
|
||||
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
|
||||
if (!isBuf) {
|
||||
var newChunk = decodeChunk(state, chunk, encoding);
|
||||
|
||||
if (chunk !== newChunk) {
|
||||
isBuf = true;
|
||||
encoding = 'buffer';
|
||||
chunk = newChunk;
|
||||
}
|
||||
}
|
||||
|
||||
var len = state.objectMode ? 1 : chunk.length;
|
||||
state.length += len;
|
||||
var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false.
|
||||
|
||||
if (!ret) state.needDrain = true;
|
||||
|
||||
if (state.writing || state.corked) {
|
||||
var last = state.lastBufferedRequest;
|
||||
state.lastBufferedRequest = {
|
||||
chunk: chunk,
|
||||
encoding: encoding,
|
||||
isBuf: isBuf,
|
||||
callback: cb,
|
||||
next: null
|
||||
};
|
||||
|
||||
if (last) {
|
||||
last.next = state.lastBufferedRequest;
|
||||
} else {
|
||||
state.bufferedRequest = state.lastBufferedRequest;
|
||||
}
|
||||
|
||||
state.bufferedRequestCount += 1;
|
||||
} else {
|
||||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
|
||||
state.writelen = len;
|
||||
state.writecb = cb;
|
||||
state.writing = true;
|
||||
state.sync = true;
|
||||
if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
|
||||
state.sync = false;
|
||||
}
|
||||
|
||||
function onwriteError(stream, state, sync, er, cb) {
|
||||
--state.pendingcb;
|
||||
|
||||
if (sync) {
|
||||
// defer the callback if we are being called synchronously
|
||||
// to avoid piling up things on the stack
|
||||
process.nextTick(cb, er); // this can emit finish, and it will always happen
|
||||
// after error
|
||||
|
||||
process.nextTick(finishMaybe, stream, state);
|
||||
stream._writableState.errorEmitted = true;
|
||||
errorOrDestroy(stream, er);
|
||||
} else {
|
||||
// the caller expect this to happen before if
|
||||
// it is async
|
||||
cb(er);
|
||||
stream._writableState.errorEmitted = true;
|
||||
errorOrDestroy(stream, er); // this can emit finish, but finish must
|
||||
// always follow error
|
||||
|
||||
finishMaybe(stream, state);
|
||||
}
|
||||
}
|
||||
|
||||
function onwriteStateUpdate(state) {
|
||||
state.writing = false;
|
||||
state.writecb = null;
|
||||
state.length -= state.writelen;
|
||||
state.writelen = 0;
|
||||
}
|
||||
|
||||
function onwrite(stream, er) {
|
||||
var state = stream._writableState;
|
||||
var sync = state.sync;
|
||||
var cb = state.writecb;
|
||||
if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK();
|
||||
onwriteStateUpdate(state);
|
||||
if (er) onwriteError(stream, state, sync, er, cb);else {
|
||||
// Check if we're actually ready to finish, but don't emit yet
|
||||
var finished = needFinish(state) || stream.destroyed;
|
||||
|
||||
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
|
||||
clearBuffer(stream, state);
|
||||
}
|
||||
|
||||
if (sync) {
|
||||
process.nextTick(afterWrite, stream, state, finished, cb);
|
||||
} else {
|
||||
afterWrite(stream, state, finished, cb);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function afterWrite(stream, state, finished, cb) {
|
||||
if (!finished) onwriteDrain(stream, state);
|
||||
state.pendingcb--;
|
||||
cb();
|
||||
finishMaybe(stream, state);
|
||||
} // Must force callback to be called on nextTick, so that we don't
|
||||
// emit 'drain' before the write() consumer gets the 'false' return
|
||||
// value, and has a chance to attach a 'drain' listener.
|
||||
|
||||
|
||||
function onwriteDrain(stream, state) {
|
||||
if (state.length === 0 && state.needDrain) {
|
||||
state.needDrain = false;
|
||||
stream.emit('drain');
|
||||
}
|
||||
} // if there's something in the buffer waiting, then process it
|
||||
|
||||
|
||||
function clearBuffer(stream, state) {
|
||||
state.bufferProcessing = true;
|
||||
var entry = state.bufferedRequest;
|
||||
|
||||
if (stream._writev && entry && entry.next) {
|
||||
// Fast case, write everything using _writev()
|
||||
var l = state.bufferedRequestCount;
|
||||
var buffer = new Array(l);
|
||||
var holder = state.corkedRequestsFree;
|
||||
holder.entry = entry;
|
||||
var count = 0;
|
||||
var allBuffers = true;
|
||||
|
||||
while (entry) {
|
||||
buffer[count] = entry;
|
||||
if (!entry.isBuf) allBuffers = false;
|
||||
entry = entry.next;
|
||||
count += 1;
|
||||
}
|
||||
|
||||
buffer.allBuffers = allBuffers;
|
||||
doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time
|
||||
// as the hot path ends with doWrite
|
||||
|
||||
state.pendingcb++;
|
||||
state.lastBufferedRequest = null;
|
||||
|
||||
if (holder.next) {
|
||||
state.corkedRequestsFree = holder.next;
|
||||
holder.next = null;
|
||||
} else {
|
||||
state.corkedRequestsFree = new CorkedRequest(state);
|
||||
}
|
||||
|
||||
state.bufferedRequestCount = 0;
|
||||
} else {
|
||||
// Slow case, write chunks one-by-one
|
||||
while (entry) {
|
||||
var chunk = entry.chunk;
|
||||
var encoding = entry.encoding;
|
||||
var cb = entry.callback;
|
||||
var len = state.objectMode ? 1 : chunk.length;
|
||||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||||
entry = entry.next;
|
||||
state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then
|
||||
// it means that we need to wait until it does.
|
||||
// also, that means that the chunk and cb are currently
|
||||
// being processed, so move the buffer counter past them.
|
||||
|
||||
if (state.writing) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (entry === null) state.lastBufferedRequest = null;
|
||||
}
|
||||
|
||||
state.bufferedRequest = entry;
|
||||
state.bufferProcessing = false;
|
||||
}
|
||||
|
||||
Writable.prototype._write = function (chunk, encoding, cb) {
|
||||
cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()'));
|
||||
};
|
||||
|
||||
Writable.prototype._writev = null;
|
||||
|
||||
Writable.prototype.end = function (chunk, encoding, cb) {
|
||||
var state = this._writableState;
|
||||
|
||||
if (typeof chunk === 'function') {
|
||||
cb = chunk;
|
||||
chunk = null;
|
||||
encoding = null;
|
||||
} else if (typeof encoding === 'function') {
|
||||
cb = encoding;
|
||||
encoding = null;
|
||||
}
|
||||
|
||||
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks
|
||||
|
||||
if (state.corked) {
|
||||
state.corked = 1;
|
||||
this.uncork();
|
||||
} // ignore unnecessary end() calls.
|
||||
|
||||
|
||||
if (!state.ending) endWritable(this, state, cb);
|
||||
return this;
|
||||
};
|
||||
|
||||
Object.defineProperty(Writable.prototype, 'writableLength', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function get() {
|
||||
return this._writableState.length;
|
||||
}
|
||||
});
|
||||
|
||||
function needFinish(state) {
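  // The stream may finish only after end() has been called, every buffered or
  // in-flight write has drained, and 'finish' has not already been emitted.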
|
||||
return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
|
||||
}
|
||||
|
||||
function callFinal(stream, state) {
|
||||
stream._final(function (err) {
|
||||
state.pendingcb--;
|
||||
|
||||
if (err) {
|
||||
errorOrDestroy(stream, err);
|
||||
}
|
||||
|
||||
state.prefinished = true;
|
||||
stream.emit('prefinish');
|
||||
finishMaybe(stream, state);
|
||||
});
|
||||
}
|
||||
|
||||
function prefinish(stream, state) {
|
||||
if (!state.prefinished && !state.finalCalled) {
|
||||
if (typeof stream._final === 'function' && !state.destroyed) {
|
||||
state.pendingcb++;
|
||||
state.finalCalled = true;
|
||||
process.nextTick(callFinal, stream, state);
|
||||
} else {
|
||||
state.prefinished = true;
|
||||
stream.emit('prefinish');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function finishMaybe(stream, state) {
|
||||
var need = needFinish(state);
|
||||
|
||||
if (need) {
|
||||
prefinish(stream, state);
|
||||
|
||||
if (state.pendingcb === 0) {
|
||||
state.finished = true;
|
||||
stream.emit('finish');
|
||||
|
||||
if (state.autoDestroy) {
|
||||
// In case of duplex streams we need a way to detect
|
||||
// if the readable side is ready for autoDestroy as well
|
||||
var rState = stream._readableState;
|
||||
|
||||
if (!rState || rState.autoDestroy && rState.endEmitted) {
|
||||
stream.destroy();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return need;
|
||||
}
|
||||
|
||||
function endWritable(stream, state, cb) {
|
||||
state.ending = true;
|
||||
finishMaybe(stream, state);
|
||||
|
||||
if (cb) {
|
||||
if (state.finished) process.nextTick(cb);else stream.once('finish', cb);
|
||||
}
|
||||
|
||||
state.ended = true;
|
||||
stream.writable = false;
|
||||
}
|
||||
|
||||
function onCorkedFinish(corkReq, state, err) {
|
||||
var entry = corkReq.entry;
|
||||
corkReq.entry = null;
|
||||
|
||||
while (entry) {
|
||||
var cb = entry.callback;
|
||||
state.pendingcb--;
|
||||
cb(err);
|
||||
entry = entry.next;
|
||||
} // reuse the free corkReq.
|
||||
|
||||
|
||||
state.corkedRequestsFree.next = corkReq;
|
||||
}
|
||||
|
||||
Object.defineProperty(Writable.prototype, 'destroyed', {
|
||||
// making it explicit this property is not enumerable
|
||||
// because otherwise some prototype manipulation in
|
||||
// userland will fail
|
||||
enumerable: false,
|
||||
get: function get() {
|
||||
if (this._writableState === undefined) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return this._writableState.destroyed;
|
||||
},
|
||||
set: function set(value) {
|
||||
// we ignore the value if the stream
|
||||
// has not been initialized yet
|
||||
if (!this._writableState) {
|
||||
return;
|
||||
} // backward compatibility, the user is explicitly
|
||||
// managing destroyed
|
||||
|
||||
|
||||
this._writableState.destroyed = value;
|
||||
}
|
||||
});
|
||||
Writable.prototype.destroy = destroyImpl.destroy;
|
||||
Writable.prototype._undestroy = destroyImpl.undestroy;
|
||||
|
||||
Writable.prototype._destroy = function (err, cb) {
|
||||
cb(err);
|
||||
};
|
||||
@@ -0,0 +1,44 @@
|
||||
{
|
||||
"name": "svelte-hmr",
|
||||
"version": "0.14.12",
|
||||
"description": "Bundler agnostic HMR utils for Svelte 3",
|
||||
"main": "index.js",
|
||||
"author": "rixo <rixo@rixo.fr>",
|
||||
"license": "ISC",
|
||||
"homepage": "https://github.com/sveltejs/svelte-hmr",
|
||||
"bugs": {
|
||||
"url": "https://github.com/sveltejs/svelte-hmr/issues"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/sveltejs/svelte-hmr",
|
||||
"directory": "packages/svelte-hmr"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"lib",
|
||||
"runtime"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^12.20 || ^14.13.1 || >= 16"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"svelte": ">=3.19.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"dotenv": "^10.0.0",
|
||||
"prettier": "^1.19.1",
|
||||
"svelte": "^3.38.3",
|
||||
"tap-mocha-reporter": "^5.0.1",
|
||||
"zoar": "^0.3.0",
|
||||
"zorax": "^0.0.12"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "eslint '**/*.{js,cjs,mjs}'",
|
||||
"lint:fix": "pnpm run lint --fix",
|
||||
"format": "prettier '**/*.{js,cjs,mjs}' --check",
|
||||
"format:fix": "pnpm run format --write",
|
||||
"test:fancy": "zoar --pipe 'tap-mocha-reporter spec'",
|
||||
"test": "zoar --exit"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const path = require("path");
|
||||
const fsStat = require("@nodelib/fs.stat");
|
||||
const fs = require("./adapters/fs");
|
||||
class Settings {
|
||||
constructor(_options = {}) {
|
||||
this._options = _options;
|
||||
this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);
|
||||
this.fs = fs.createFileSystemAdapter(this._options.fs);
|
||||
this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);
|
||||
this.stats = this._getValue(this._options.stats, false);
|
||||
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
|
||||
this.fsStatSettings = new fsStat.Settings({
|
||||
followSymbolicLink: this.followSymbolicLinks,
|
||||
fs: this.fs,
|
||||
throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink
|
||||
});
|
||||
}
|
||||
_getValue(option, value) {
|
||||
return option !== null && option !== void 0 ? option : value;
|
||||
}
|
||||
}
|
||||
exports.default = Settings;
|
||||
@@ -0,0 +1,69 @@
|
||||
{
|
||||
"name": "es-set-tostringtag",
|
||||
"version": "2.0.1",
|
||||
"description": "A helper to optimistically set Symbol.toStringTag, when possible.",
|
||||
"main": "index.js",
|
||||
"exports": {
|
||||
".": "./index.js",
|
||||
"./package.json": "./package.json"
|
||||
},
|
||||
"sideEffects": false,
|
||||
"scripts": {
|
||||
"prepack": "npmignore --auto --commentLines=autogenerated",
|
||||
"prepublishOnly": "safe-publish-latest",
|
||||
"prepublish": "not-in-publish || npm run prepublishOnly",
|
||||
"prelint": "evalmd README.md",
|
||||
"lint": "eslint --ext=js,mjs .",
|
||||
"pretest": "npm run lint",
|
||||
"tests-only": "tape 'test/**/*.js'",
|
||||
"test": "npm run tests-only",
|
||||
"posttest": "aud --production",
|
||||
"version": "auto-changelog && git add CHANGELOG.md",
|
||||
"postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\""
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/es-shims/es-set-tostringtag.git"
|
||||
},
|
||||
"author": "Jordan Harband <ljharb@gmail.com>",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/es-shims/es-set-tostringtag/issues"
|
||||
},
|
||||
"homepage": "https://github.com/es-shims/es-set-tostringtag#readme",
|
||||
"devDependencies": {
|
||||
"@ljharb/eslint-config": "^21.0.1",
|
||||
"aud": "^2.0.2",
|
||||
"auto-changelog": "^2.4.0",
|
||||
"eslint": "=8.8.0",
|
||||
"evalmd": "^0.0.19",
|
||||
"in-publish": "^2.0.1",
|
||||
"npmignore": "^0.3.0",
|
||||
"safe-publish-latest": "^2.0.0",
|
||||
"tape": "^5.6.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"get-intrinsic": "^1.1.3",
|
||||
"has": "^1.0.3",
|
||||
"has-tostringtag": "^1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"auto-changelog": {
|
||||
"output": "CHANGELOG.md",
|
||||
"template": "keepachangelog",
|
||||
"unreleased": false,
|
||||
"commitLimit": false,
|
||||
"backfillLimit": false,
|
||||
"hideCredit": true
|
||||
},
|
||||
"testling": {
|
||||
"files": "./test/index.js"
|
||||
},
|
||||
"publishConfig": {
|
||||
"ignore": [
|
||||
".github/workflows"
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,46 @@
|
||||
'use strict';
|
||||
|
||||
var GetIntrinsic = require('get-intrinsic');
|
||||
|
||||
var callBound = require('call-bind/callBound');
|
||||
|
||||
var $TypeError = GetIntrinsic('%TypeError%');
|
||||
var $indexOf = callBound('Array.prototype.indexOf', true) || callBound('String.prototype.indexOf');
|
||||
var $push = callBound('Array.prototype.push');
|
||||
|
||||
var Get = require('./Get');
|
||||
var IsArray = require('./IsArray');
|
||||
var LengthOfArrayLike = require('./LengthOfArrayLike');
|
||||
var ToString = require('./ToString');
|
||||
var Type = require('./Type');
|
||||
|
||||
var defaultElementTypes = ['Undefined', 'Null', 'Boolean', 'String', 'Symbol', 'Number', 'BigInt', 'Object'];
|
||||
|
||||
// https://262.ecma-international.org/11.0/#sec-createlistfromarraylike
|
||||
|
||||
module.exports = function CreateListFromArrayLike(obj) {
|
||||
var elementTypes = arguments.length > 1
|
||||
? arguments[1]
|
||||
: defaultElementTypes;
|
||||
|
||||
if (Type(obj) !== 'Object') {
|
||||
throw new $TypeError('Assertion failed: `obj` must be an Object');
|
||||
}
|
||||
if (!IsArray(elementTypes)) {
|
||||
throw new $TypeError('Assertion failed: `elementTypes`, if provided, must be an array');
|
||||
}
|
||||
var len = LengthOfArrayLike(obj);
|
||||
var list = [];
|
||||
var index = 0;
|
||||
while (index < len) {
|
||||
var indexName = ToString(index);
|
||||
var next = Get(obj, indexName);
|
||||
var nextType = Type(next);
|
||||
if ($indexOf(elementTypes, nextType) < 0) {
|
||||
throw new $TypeError('item type ' + nextType + ' is not a valid elementType');
|
||||
}
|
||||
$push(list, next);
|
||||
index += 1;
|
||||
}
|
||||
return list;
|
||||
};
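
// For example, CreateListFromArrayLike({ length: 2, 0: 'a', 1: 'b' }) yields ['a', 'b'];
// a non-Object argument, or an element whose Type is not in elementTypes, throws a TypeError.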
|
||||
@@ -0,0 +1 @@
|
||||
{"name":"wildcard-match","version":"5.1.2","files":{"LICENSE":{"checkedAt":1678883672788,"integrity":"sha512-NKPFFdJUKgrdUq/Wi1aLNFVPl7wQL1D80D7/uR0BW/MdNA8JswrzHuxE3Sfa/0CJqnihsSGOxwznYoyG3faqZw==","mode":420,"size":745},"build/index.js":{"checkedAt":1678883672788,"integrity":"sha512-Bkou8k5/ewoB6MHlUdJPsU3ZEurL38j6g6tVk7er1TvV3v2GwyGjAFrUCakJH4FMe6m6i2bCHHOruW8Bx4ihnw==","mode":438,"size":5432},"build/index.umd.js":{"checkedAt":1678883672788,"integrity":"sha512-/HyeCpjqUmVt3zZa9waO0K/qd1N9sTMZaV/dybap7LGf0R6ocY7nM0UK+SRpCa9SmZrF8W/RkNpWy4VJoUNJFg==","mode":438,"size":1949},"package.json":{"checkedAt":1678883672788,"integrity":"sha512-O6hCuqCgM+IvHaP/l4+bYupG7MwKPuqwVtkdsIqdBa5d+jUMbu5yrFTgvShiFtb3wam5+crgHH2z6rDttZkaSQ==","mode":420,"size":1753},"build/index.es.mjs.map":{"checkedAt":1678883672790,"integrity":"sha512-Hbe1Np7XEKdRZjVvBt6sn3VPalRhOIVt8CCDm1wQ1drEa5ZtrOHyOV7WwUvW1xPe2X9uMICPu2febIt77pkutA==","mode":438,"size":10859},"build/index.js.map":{"checkedAt":1678883672792,"integrity":"sha512-NLjS+KtJd/X8/CiU6EJK67joefVt/8ezwaj7Zhq7mwiF6owuNo9n3m0oIwj24qXnp6E6mjYYgMo3dOdZqZtnHg==","mode":438,"size":10857},"build/index.umd.js.map":{"checkedAt":1678883672792,"integrity":"sha512-r8Kr2bXJsypCyHLtr39T/5Xk+rTdd3/YmtbfOJLQoBPR47EUKk+RPnoJQDTK1JdA6R98o2IJ5rs50X5WbmtF/A==","mode":438,"size":8986},"README.md":{"checkedAt":1678883672793,"integrity":"sha512-FRIeJBwNDjwGEnFO8NPLXnnJ4LVYiExGZnDxutrQfzobcOzuwlW5l9DliwP7Pv1BM6d+zf43ayPgNiO8sufhPg==","mode":420,"size":10849},"build/index.es.mjs":{"checkedAt":1678883672793,"integrity":"sha512-yDJnaHTnNN3sgSb4xWkPmFkdCMQJp8AsrQPTVTMvQisgT4KUFCqFMDV/ETGLz04v3wNSqydVNLqNPspSU1VMDg==","mode":438,"size":5419},"build/index.d.ts":{"checkedAt":1678883672794,"integrity":"sha512-X4qDvp4vWMj+LYhNwTr012E+surffq6LiRTswtWC9sCdUDG92l1aNoxECo1+60gZlAXs4Yb/5FAAHdQooMNYQQ==","mode":438,"size":1314},"build/index.umd.d.ts":{"checkedAt":1678883672794,"integrity":"sha512-X4qDvp4vWMj+LYhNwTr012E+surffq6LiRTswtWC9sCdUDG92l1aNoxECo1+60gZlAXs4Yb/5FAAHdQooMNYQQ==","mode":438,"size":1314},"build/index.es.d.ts":{"checkedAt":1678883672794,"integrity":"sha512-AVgI3fRXwCRahofjl/cYQ2wf80fyKWKuFiE1vXbGewHoKeirizBgAADlDFvEpN9uLqmCEtWGyV65XKFdrRnJ2Q==","mode":438,"size":1327}}}
|
||||
@@ -0,0 +1,12 @@
|
||||
# These are supported funding model platforms

github: [ljharb]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: npm/define-properties
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
@@ -0,0 +1 @@
|
||||
{"name":"fill-range","version":"7.0.1","files":{"package.json":{"checkedAt":1678883670924,"integrity":"sha512-O+dv7Za/EVaiHYiAr5QmRcJz6dQc0OE+vcUztCtBBMVJQ3n+4Np2iEbSqOq0R6Zs8805P5n0HPIwR1gEDhMZKQ==","mode":420,"size":1459},"LICENSE":{"checkedAt":1678883670924,"integrity":"sha512-cjkrzNiWTIjsiqPYFXRqK2pEZtnHyo9CjX0PPiuxFnTvSUyjNciyVe7lglwIene7RaXWACXzGLeKZOGb7M0jxw==","mode":420,"size":1091},"index.js":{"checkedAt":1678883670924,"integrity":"sha512-9+zGmXpDtoYCrWasViDnKRlW1UTYRM/o4DdYi5I87bwCe+aXpwqRi72o9BCdus1XsMx2Ys4ey7YrMEYsSYev2Q==","mode":420,"size":6315},"README.md":{"checkedAt":1678883670925,"integrity":"sha512-q49lxfrZJcdxLNm44d27USot92YVViSTJqjJYxlXgg1GDH1Pd4lOhAxqXa8OsXX0w9OIbmzVMmw2s0LSx7ZWgQ==","mode":420,"size":7486}}}
|
||||
@@ -0,0 +1,16 @@
|
||||
var inspect = require('../');
var test = require('tape');

test('circular', function (t) {
    t.plan(2);
    var obj = { a: 1, b: [3, 4] };
    obj.c = obj;
    t.equal(inspect(obj), '{ a: 1, b: [ 3, 4 ], c: [Circular] }');

    var double = {};
    double.a = [double];
    double.b = {};
    double.b.inner = double.b;
    double.b.obj = double;
    t.equal(inspect(double), '{ a: [ [Circular] ], b: { inner: [Circular], obj: [Circular] } }');
});
@@ -0,0 +1,94 @@
|
||||
import { SchedulerLike } from '../types';
|
||||
import { Observable } from '../Observable';
|
||||
import { EMPTY } from './empty';
|
||||
|
||||
export function range(start: number, count?: number): Observable<number>;
|
||||
|
||||
/**
|
||||
* @deprecated The `scheduler` parameter will be removed in v8. Use `range(start, count).pipe(observeOn(scheduler))` instead. Details: Details: https://rxjs.dev/deprecations/scheduler-argument
|
||||
*/
|
||||
export function range(start: number, count: number | undefined, scheduler: SchedulerLike): Observable<number>;
|
||||
|
||||
/**
|
||||
* Creates an Observable that emits a sequence of numbers within a specified
|
||||
* range.
|
||||
*
|
||||
* <span class="informal">Emits a sequence of numbers in a range.</span>
|
||||
*
|
||||
* 
|
||||
*
|
||||
* `range` operator emits a range of sequential integers, in order, where you
|
||||
* select the `start` of the range and its `length`. By default, uses no
|
||||
* {@link SchedulerLike} and just delivers the notifications synchronously, but may use
|
||||
* an optional {@link SchedulerLike} to regulate those deliveries.
|
||||
*
|
||||
* ## Example
|
||||
*
|
||||
* Produce a range of numbers
|
||||
*
|
||||
* ```ts
|
||||
* import { range } from 'rxjs';
|
||||
*
|
||||
* const numbers = range(1, 3);
|
||||
*
|
||||
* numbers.subscribe({
|
||||
* next: value => console.log(value),
|
||||
* complete: () => console.log('Complete!')
|
||||
* });
|
||||
*
|
||||
* // Logs:
|
||||
* // 1
|
||||
* // 2
|
||||
* // 3
|
||||
* // 'Complete!'
|
||||
* ```
|
||||
*
|
||||
* @see {@link timer}
|
||||
* @see {@link interval}
|
||||
*
|
||||
* @param {number} [start=0] The value of the first integer in the sequence.
|
||||
* @param {number} count The number of sequential integers to generate.
|
||||
* @param {SchedulerLike} [scheduler] A {@link SchedulerLike} to use for scheduling
|
||||
* the emissions of the notifications.
|
||||
* @return {Observable} An Observable of numbers that emits a finite range of
|
||||
* sequential integers.
|
||||
*/
|
||||
export function range(start: number, count?: number, scheduler?: SchedulerLike): Observable<number> {
|
||||
if (count == null) {
|
||||
// If one argument was passed, it's the count, not the start.
|
||||
count = start;
|
||||
start = 0;
|
||||
}
|
||||
|
||||
if (count <= 0) {
|
||||
// No count? We're going nowhere. Return EMPTY.
|
||||
return EMPTY;
|
||||
}
|
||||
|
||||
// Where the range should stop.
|
||||
const end = count + start;
|
||||
|
||||
return new Observable(
|
||||
scheduler
|
||||
? // The deprecated scheduled path.
|
||||
(subscriber) => {
|
||||
let n = start;
|
||||
return scheduler.schedule(function () {
|
||||
if (n < end) {
|
||||
subscriber.next(n++);
|
||||
this.schedule();
|
||||
} else {
|
||||
subscriber.complete();
|
||||
}
|
||||
});
|
||||
}
|
||||
: // Standard synchronous range.
|
||||
(subscriber) => {
|
||||
let n = start;
|
||||
while (n < end && !subscriber.closed) {
|
||||
subscriber.next(n++);
|
||||
}
|
||||
subscriber.complete();
|
||||
}
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
'use strict';

module.exports = require('./async').all;
@@ -0,0 +1,21 @@
|
||||
'use strict';

var GetIntrinsic = require('get-intrinsic');

var $TypeError = GetIntrinsic('%TypeError%');

var isNaN = require('../../helpers/isNaN');
var Type = require('../Type');

// https://262.ecma-international.org/11.0/#sec-numeric-types-number-equal

module.exports = function NumberEqual(x, y) {
	if (Type(x) !== 'Number' || Type(y) !== 'Number') {
		throw new $TypeError('Assertion failed: `x` and `y` arguments must be Numbers');
	}
	if (isNaN(x) || isNaN(y)) {
		return false;
	}
	// shortcut for the actual spec mechanics
	return x === y;
};
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"QueueAction.js","sourceRoot":"","sources":["../../../../src/internal/scheduler/QueueAction.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,6CAA4C;AAM5C;IAAoC,+BAAc;IAChD,qBAAsB,SAAyB,EAAY,IAAmD;QAA9G,YACE,kBAAM,SAAS,EAAE,IAAI,CAAC,SACvB;QAFqB,eAAS,GAAT,SAAS,CAAgB;QAAY,UAAI,GAAJ,IAAI,CAA+C;;IAE9G,CAAC;IAEM,8BAAQ,GAAf,UAAgB,KAAS,EAAE,KAAiB;QAAjB,sBAAA,EAAA,SAAiB;QAC1C,IAAI,KAAK,GAAG,CAAC,EAAE;YACb,OAAO,iBAAM,QAAQ,YAAC,KAAK,EAAE,KAAK,CAAC,CAAC;SACrC;QACD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,6BAAO,GAAd,UAAe,KAAQ,EAAE,KAAa;QACpC,OAAO,KAAK,GAAG,CAAC,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,iBAAM,OAAO,YAAC,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IAC9F,CAAC;IAES,oCAAc,GAAxB,UAAyB,SAAyB,EAAE,EAAgB,EAAE,KAAiB;QAAjB,sBAAA,EAAA,SAAiB;QAKrF,IAAI,CAAC,KAAK,IAAI,IAAI,IAAI,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,IAAI,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,EAAE;YACrE,OAAO,iBAAM,cAAc,YAAC,SAAS,EAAE,EAAE,EAAE,KAAK,CAAC,CAAC;SACnD;QAGD,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QAMtB,OAAO,CAAC,CAAC;IACX,CAAC;IACH,kBAAC;AAAD,CAAC,AArCD,CAAoC,yBAAW,GAqC9C;AArCY,kCAAW"}
|
||||
@@ -0,0 +1 @@
|
||||
{"name":"is-interactive","version":"2.0.0","files":{"license":{"checkedAt":1678883669302,"integrity":"sha512-0fM2/ycrxrltyaBKfQ748Ck23VlPUUBgNAR47ldf4B1V/HoXTfWBSk+vcshGKwEpmOynu4mOP5o+hyBfuRNa8g==","mode":420,"size":1117},"index.js":{"checkedAt":1678883671280,"integrity":"sha512-dXHfJORI51Ej+NRO7VEY1l3s0VV0XZkk7wLzmhCPWNyWgibVtNRvyXO/5OAGtVsQyUZNnnraaaH0nfdiYJOPvg==","mode":420,"size":181},"package.json":{"checkedAt":1678883671280,"integrity":"sha512-T/h3ijr54KYa5vwNuukq6WRoLQ3EeJe3DrPp+VnOX3Py1QMGUe7tohsQoH3NSpyYaKCrP9pTNJs6AfFM1bBieQ==","mode":420,"size":746},"readme.md":{"checkedAt":1678883671280,"integrity":"sha512-TPlWD0XDlxObnoQ6aGpU8grHioB3Z4B6pqWMaS3ch064JQk19ztS2ZBhtuzJQbLmQCYGzk2xCkpCj3UQiiM82A==","mode":420,"size":2070},"index.d.ts":{"checkedAt":1678883671280,"integrity":"sha512-NCeqlHRMXWZFsCT/R4kK9g9wGrBDXNO753A7T7QsGVOcS8lVY7UoM2Jja8+JY5w3+SR/TVdhD5V5epvxaJeWvA==","mode":420,"size":612}}}
|
||||