new license file version [CI SKIP]
@@ -0,0 +1,27 @@
import { Subject } from './Subject';
export class BehaviorSubject extends Subject {
    constructor(_value) {
        super();
        this._value = _value;
    }
    get value() {
        return this.getValue();
    }
    _subscribe(subscriber) {
        const subscription = super._subscribe(subscriber);
        !subscription.closed && subscriber.next(this._value);
        return subscription;
    }
    getValue() {
        const { hasError, thrownError, _value } = this;
        if (hasError) {
            throw thrownError;
        }
        this._throwIfClosed();
        return _value;
    }
    next(value) {
        super.next((this._value = value));
    }
}
//# sourceMappingURL=BehaviorSubject.js.map
@@ -0,0 +1 @@
module.exports={A:{A:{"1":"B","2":"J D E F A CC"},B:{"1":"C K L G M N O P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H"},C:{"1":"0 1 2 3 4 5 6 7 8 9 I v J D E F A B C K L G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB uB ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB FC","2":"DC tB EC"},D:{"1":"0 1 2 3 4 5 6 7 8 9 I v J D E F A B C K L G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB uB ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB GC"},E:{"1":"I v J D E F A B C K L G IC JC KC LC 0B qB rB 1B MC NC 2B 3B 4B 5B sB 6B 7B 8B 9B OC","2":"HC zB"},F:{"1":"0 1 2 3 4 5 6 7 8 9 G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e","2":"F B C PC QC RC SC qB AC TC rB"},G:{"1":"E zB UC BC VC WC XC YC ZC aC bC cC dC eC fC gC hC iC jC kC lC mC nC 2B 3B 4B 5B sB 6B 7B 8B 9B"},H:{"2":"oC"},I:{"1":"tB I f pC qC rC sC BC tC uC"},J:{"1":"D A"},K:{"1":"h","2":"A B C qB AC rB"},L:{"1":"H"},M:{"1":"H"},N:{"1":"B","2":"A"},O:{"1":"vC"},P:{"1":"I g wC xC yC zC 0C 0B 1C 2C 3C 4C 5C sB 6C 7C 8C"},Q:{"1":"1B"},R:{"1":"9C"},S:{"1":"AD BD"}},B:7,C:"CSS pointer-events (for HTML)"};
@@ -0,0 +1,12 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.sampleTime = void 0;
var async_1 = require("../scheduler/async");
var sample_1 = require("./sample");
var interval_1 = require("../observable/interval");
function sampleTime(period, scheduler) {
    if (scheduler === void 0) { scheduler = async_1.asyncScheduler; }
    return sample_1.sample(interval_1.interval(period, scheduler));
}
exports.sampleTime = sampleTime;
//# sourceMappingURL=sampleTime.js.map
@@ -0,0 +1 @@
{"version":3,"file":"windowWhen.js","sourceRoot":"","sources":["../../../../src/internal/operators/windowWhen.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AAErC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAC;AACvC,OAAO,EAAE,wBAAwB,EAAE,MAAM,sBAAsB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,yBAAyB,CAAC;AA8CpD,MAAM,UAAU,UAAU,CAAI,eAA2C;IACvE,OAAO,OAAO,CAAC,UAAC,MAAM,EAAE,UAAU;QAChC,IAAI,MAAyB,CAAC;QAC9B,IAAI,iBAA8C,CAAC;QAMnD,IAAM,WAAW,GAAG,UAAC,GAAQ;YAC3B,MAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YACnB,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QACxB,CAAC,CAAC;QAQF,IAAM,UAAU,GAAG;YAGjB,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,WAAW,EAAE,CAAC;YAGjC,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,QAAQ,EAAE,CAAC;YAGnB,MAAM,GAAG,IAAI,OAAO,EAAK,CAAC;YAC1B,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE,CAAC,CAAC;YAGvC,IAAI,eAAgC,CAAC;YACrC,IAAI;gBACF,eAAe,GAAG,SAAS,CAAC,eAAe,EAAE,CAAC,CAAC;aAChD;YAAC,OAAO,GAAG,EAAE;gBACZ,WAAW,CAAC,GAAG,CAAC,CAAC;gBACjB,OAAO;aACR;YAMD,eAAe,CAAC,SAAS,CAAC,CAAC,iBAAiB,GAAG,wBAAwB,CAAC,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC;QAC7H,CAAC,CAAC;QAGF,UAAU,EAAE,CAAC;QAGb,MAAM,CAAC,SAAS,CACd,wBAAwB,CACtB,UAAU,EACV,UAAC,KAAK,IAAK,OAAA,MAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EAAnB,CAAmB,EAC9B;YAEE,MAAO,CAAC,QAAQ,EAAE,CAAC;YACnB,UAAU,CAAC,QAAQ,EAAE,CAAC;QACxB,CAAC,EACD,WAAW,EACX;YAGE,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,WAAW,EAAE,CAAC;YACjC,MAAM,GAAG,IAAK,CAAC;QACjB,CAAC,CACF,CACF,CAAC;IACJ,CAAC,CAAC,CAAC;AACL,CAAC"}
@@ -0,0 +1,33 @@
/// <reference types="node" />
import { Readable } from 'stream';
import { UrlWithStringQuery } from 'url';
/**
 * Async function that returns a `stream.Readable` instance to the
 * callback function that will output the contents of the given URI.
 *
 * For caching purposes, you can pass in a `stream` instance from a previous
 * `getUri()` call as a `cache: stream` option, and if the destination has
 * not changed since the last time the endpoint was retrieved then the callback
 * will be invoked with an Error object with `code` set to "ENOTMODIFIED" and
 * `null` for the "stream" instance argument. In this case, you can skip
 * retrieving the file again and continue to use the previous payload.
 *
 * @param {String} uri URI to retrieve
 * @param {Object} opts optional "options" object
 * @param {Function} fn callback function
 * @api public
 */
declare function getUri(uri: string, fn: getUri.GetUriCallback): void;
declare function getUri(uri: string, opts: getUri.GetUriOptions, fn: getUri.GetUriCallback): void;
declare function getUri(uri: string, opts?: getUri.GetUriOptions): Promise<Readable>;
declare namespace getUri {
    interface GetUriOptions {
        cache?: Readable;
    }
    type GetUriCallback = (err?: Error | null, res?: Readable) => void;
    type GetUriProtocol = (parsed: UrlWithStringQuery, opts: getUri.GetUriOptions) => Promise<Readable>;
    const protocols: {
        [key: string]: getUri.GetUriProtocol;
    };
}
export = getUri;
@@ -0,0 +1,5 @@
import { Fork } from "../types";
export default function (fork: Fork): {
    (a: any, b: any, problemPath?: any): boolean;
    assert(a: any, b: any): void;
};
@@ -0,0 +1,17 @@
import { Notification } from '../Notification';
import { operate } from '../util/lift';
import { createOperatorSubscriber } from './OperatorSubscriber';
export function materialize() {
    return operate((source, subscriber) => {
        source.subscribe(createOperatorSubscriber(subscriber, (value) => {
            subscriber.next(Notification.createNext(value));
        }, () => {
            subscriber.next(Notification.createComplete());
            subscriber.complete();
        }, (err) => {
            subscriber.next(Notification.createError(err));
            subscriber.complete();
        }));
    });
}
//# sourceMappingURL=materialize.js.map
@@ -0,0 +1,42 @@
let Declaration = require('../declaration')

class WritingMode extends Declaration {
  insert(decl, prefix, prefixes) {
    if (prefix === '-ms-') {
      let cloned = this.set(this.clone(decl), prefix)

      if (this.needCascade(decl)) {
        cloned.raws.before = this.calcBefore(prefixes, decl, prefix)
      }
      let direction = 'ltr'

      decl.parent.nodes.forEach(i => {
        if (i.prop === 'direction') {
          if (i.value === 'rtl' || i.value === 'ltr') direction = i.value
        }
      })

      cloned.value = WritingMode.msValues[direction][decl.value] || decl.value
      return decl.parent.insertBefore(decl, cloned)
    }

    return super.insert(decl, prefix, prefixes)
  }
}

WritingMode.names = ['writing-mode']

WritingMode.msValues = {
  ltr: {
    'horizontal-tb': 'lr-tb',
    'vertical-rl': 'tb-rl',
    'vertical-lr': 'tb-lr'
  },
  rtl: {
    'horizontal-tb': 'rl-tb',
    'vertical-rl': 'bt-rl',
    'vertical-lr': 'bt-lr'
  }
}

module.exports = WritingMode
@@ -0,0 +1,271 @@
const isWin = process.platform === 'win32';
const SEP = isWin ? `\\\\+` : `\\/`;
const SEP_ESC = isWin ? `\\\\` : `/`;
const GLOBSTAR = `((?:[^/]*(?:/|$))*)`;
const WILDCARD = `([^/]*)`;
const GLOBSTAR_SEGMENT = `((?:[^${SEP_ESC}]*(?:${SEP_ESC}|$))*)`;
const WILDCARD_SEGMENT = `([^${SEP_ESC}]*)`;

/**
 * Convert any glob pattern to a JavaScript Regexp object
 * @param {String} glob Glob pattern to convert
 * @param {Object} opts Configuration object
 * @param {Boolean} [opts.extended=false] Support advanced ext globbing
 * @param {Boolean} [opts.globstar=false] Support globstar
 * @param {Boolean} [opts.strict=true] be laissez faire about multiple slashes
 * @param {Boolean} [opts.filepath=''] Parse as filepath for extra path related features
 * @param {String} [opts.flags=''] RegExp globs
 * @returns {Object} converted object with string, segments and RegExp object
 */
function globrex(glob, {extended = false, globstar = false, strict = false, filepath = false, flags = ''} = {}) {
    let regex = '';
    let segment = '';
    let path = { regex: '', segments: [] };

    // If we are doing extended matching, this boolean is true when we are inside
    // a group (eg {*.html,*.js}), and false otherwise.
    let inGroup = false;
    let inRange = false;

    // extglob stack. Keep track of scope
    const ext = [];

    // Helper function to build string and segments
    function add(str, {split, last, only}={}) {
        if (only !== 'path') regex += str;
        if (filepath && only !== 'regex') {
            path.regex += (str === '\\/' ? SEP : str);
            if (split) {
                if (last) segment += str;
                if (segment !== '') {
                    if (!flags.includes('g')) segment = `^${segment}$`; // change it 'includes'
                    path.segments.push(new RegExp(segment, flags));
                }
                segment = '';
            } else {
                segment += str;
            }
        }
    }

    let c, n;
    for (let i = 0; i < glob.length; i++) {
        c = glob[i];
        n = glob[i + 1];

        if (['\\', '$', '^', '.', '='].includes(c)) {
            add(`\\${c}`);
            continue;
        }

        if (c === '/') {
            add(`\\${c}`, {split: true});
            if (n === '/' && !strict) regex += '?';
            continue;
        }

        if (c === '(') {
            if (ext.length) {
                add(c);
                continue;
            }
            add(`\\${c}`);
            continue;
        }

        if (c === ')') {
            if (ext.length) {
                add(c);
                let type = ext.pop();
                if (type === '@') {
                    add('{1}');
                } else if (type === '!') {
                    add('([^\/]*)');
                } else {
                    add(type);
                }
                continue;
            }
            add(`\\${c}`);
            continue;
        }

        if (c === '|') {
            if (ext.length) {
                add(c);
                continue;
            }
            add(`\\${c}`);
            continue;
        }

        if (c === '+') {
            if (n === '(' && extended) {
                ext.push(c);
                continue;
            }
            add(`\\${c}`);
            continue;
        }

        if (c === '@' && extended) {
            if (n === '(') {
                ext.push(c);
                continue;
            }
        }

        if (c === '!') {
            if (extended) {
                if (inRange) {
                    add('^');
                    continue
                }
                if (n === '(') {
                    ext.push(c);
                    add('(?!');
                    i++;
                    continue;
                }
                add(`\\${c}`);
                continue;
            }
            add(`\\${c}`);
            continue;
        }

        if (c === '?') {
            if (extended) {
                if (n === '(') {
                    ext.push(c);
                } else {
                    add('.');
                }
                continue;
            }
            add(`\\${c}`);
            continue;
        }

        if (c === '[') {
            if (inRange && n === ':') {
                i++; // skip [
                let value = '';
                while(glob[++i] !== ':') value += glob[i];
                if (value === 'alnum') add('(\\w|\\d)');
                else if (value === 'space') add('\\s');
                else if (value === 'digit') add('\\d');
                i++; // skip last ]
                continue;
            }
            if (extended) {
                inRange = true;
                add(c);
                continue;
            }
            add(`\\${c}`);
            continue;
        }

        if (c === ']') {
            if (extended) {
                inRange = false;
                add(c);
                continue;
            }
            add(`\\${c}`);
            continue;
        }

        if (c === '{') {
            if (extended) {
                inGroup = true;
                add('(');
                continue;
            }
            add(`\\${c}`);
            continue;
        }

        if (c === '}') {
            if (extended) {
                inGroup = false;
                add(')');
                continue;
            }
            add(`\\${c}`);
            continue;
        }

        if (c === ',') {
            if (inGroup) {
                add('|');
                continue;
            }
            add(`\\${c}`);
            continue;
        }

        if (c === '*') {
            if (n === '(' && extended) {
                ext.push(c);
                continue;
            }
            // Move over all consecutive "*"'s.
            // Also store the previous and next characters
            let prevChar = glob[i - 1];
            let starCount = 1;
            while (glob[i + 1] === '*') {
                starCount++;
                i++;
            }
            let nextChar = glob[i + 1];
            if (!globstar) {
                // globstar is disabled, so treat any number of "*" as one
                add('.*');
            } else {
                // globstar is enabled, so determine if this is a globstar segment
                let isGlobstar =
                    starCount > 1 && // multiple "*"'s
                    (prevChar === '/' || prevChar === undefined) && // from the start of the segment
                    (nextChar === '/' || nextChar === undefined); // to the end of the segment
                if (isGlobstar) {
                    // it's a globstar, so match zero or more path segments
                    add(GLOBSTAR, {only:'regex'});
                    add(GLOBSTAR_SEGMENT, {only:'path', last:true, split:true});
                    i++; // move over the "/"
                } else {
                    // it's not a globstar, so only match one path segment
                    add(WILDCARD, {only:'regex'});
                    add(WILDCARD_SEGMENT, {only:'path'});
                }
            }
            continue;
        }

        add(c);
    }


    // When regexp 'g' flag is specified don't
    // constrain the regular expression with ^ & $
    if (!flags.includes('g')) {
        regex = `^${regex}$`;
        segment = `^${segment}$`;
        if (filepath) path.regex = `^${path.regex}$`;
    }

    const result = {regex: new RegExp(regex, flags)};

    // Push the last segment
    if (filepath) {
        path.segments.push(new RegExp(segment, flags));
        path.regex = new RegExp(path.regex, flags);
        path.globstar = new RegExp(!flags.includes('g') ? `^${GLOBSTAR_SEGMENT}$` : GLOBSTAR_SEGMENT, flags);
        result.path = path;
    }

    return result;
}

module.exports = globrex;
@@ -0,0 +1,66 @@
'use strict';

var YAMLException = require('./exception');

var TYPE_CONSTRUCTOR_OPTIONS = [
  'kind',
  'multi',
  'resolve',
  'construct',
  'instanceOf',
  'predicate',
  'represent',
  'representName',
  'defaultStyle',
  'styleAliases'
];

var YAML_NODE_KINDS = [
  'scalar',
  'sequence',
  'mapping'
];

function compileStyleAliases(map) {
  var result = {};

  if (map !== null) {
    Object.keys(map).forEach(function (style) {
      map[style].forEach(function (alias) {
        result[String(alias)] = style;
      });
    });
  }

  return result;
}

function Type(tag, options) {
  options = options || {};

  Object.keys(options).forEach(function (name) {
    if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) {
      throw new YAMLException('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.');
    }
  });

  // TODO: Add tag format check.
  this.options = options; // keep original options in case user wants to extend this type later
  this.tag = tag;
  this.kind = options['kind'] || null;
  this.resolve = options['resolve'] || function () { return true; };
  this.construct = options['construct'] || function (data) { return data; };
  this.instanceOf = options['instanceOf'] || null;
  this.predicate = options['predicate'] || null;
  this.represent = options['represent'] || null;
  this.representName = options['representName'] || null;
  this.defaultStyle = options['defaultStyle'] || null;
  this.multi = options['multi'] || false;
  this.styleAliases = compileStyleAliases(options['styleAliases'] || null);

  if (YAML_NODE_KINDS.indexOf(this.kind) === -1) {
    throw new YAMLException('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.');
  }
}

module.exports = Type;
@@ -0,0 +1 @@
module.exports={A:{A:{"1":"F A B","2":"J D E CC"},B:{"1":"C K L G M N O P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H"},C:{"1":"0 1 2 3 4 5 6 7 8 9 DC tB I v J D E F A B C K L G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB uB ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB EC FC"},D:{"1":"0 1 2 3 4 5 6 7 8 9 I v J D E F A B C K L G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB uB ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB GC"},E:{"1":"I v J D E F A B C K L G IC JC KC LC 0B qB rB 1B MC NC 2B 3B 4B 5B sB 6B 7B 8B 9B OC","16":"HC zB"},F:{"1":"0 1 2 3 4 5 6 7 8 9 B C G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e SC qB AC TC rB","16":"F PC QC RC"},G:{"1":"E UC BC VC WC XC YC ZC aC bC cC dC eC fC gC hC iC jC kC lC mC nC 2B 3B 4B 5B sB 6B 7B 8B 9B","16":"zB"},H:{"2":"oC"},I:{"1":"tB I f pC qC rC sC BC tC uC"},J:{"1":"D A"},K:{"1":"A B C h qB AC rB"},L:{"1":"H"},M:{"1":"H"},N:{"1":"A B"},O:{"1":"vC"},P:{"1":"I g wC xC yC zC 0C 0B 1C 2C 3C 4C 5C sB 6C 7C 8C"},Q:{"1":"1B"},R:{"1":"9C"},S:{"1":"AD BD"}},B:1,C:"Selection controls for input & textarea"};
@@ -0,0 +1 @@
module.exports={A:{A:{"1":"B","2":"J D CC","66":"E F A"},B:{"1":"C K L G M N O P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H"},C:{"1":"0 1 2 3 4 5 6 7 8 9 AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB uB ZB vB aB bB cB dB eB fB","2":"DC tB I v J D E F A B C K L G M N O w g x y EC FC","66":"z","129":"gB hB iB jB kB h lB mB nB oB","388":"pB P Q R wB S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB"},D:{"1":"0 1 2 3 4 5 6 7 8 9 y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB uB ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R S T","2":"I v J D E F A B C K L G M N O w g x","1540":"U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB GC"},E:{"1":"D E F A B C K KC LC 0B qB rB","2":"I v J HC zB IC JC","513":"L G 1B MC NC 2B 3B 4B 5B sB 6B 7B 8B 9B OC"},F:{"1":"0 1 2 3 4 5 6 7 8 9 G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB rB","2":"F B C PC QC RC SC qB AC TC","1540":"h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e"},G:{"1":"E VC WC XC YC ZC aC bC cC dC eC fC gC hC iC jC kC lC mC nC 2B 3B 4B 5B sB 6B 7B 8B 9B","2":"zB UC BC"},H:{"1":"oC"},I:{"1":"f","2":"tB I pC qC rC sC BC tC uC"},J:{"1":"A","2":"D"},K:{"1":"h rB","2":"A B C qB AC"},L:{"1":"H"},M:{"129":"H"},N:{"1":"B","66":"A"},O:{"1":"vC"},P:{"1":"I g wC xC yC zC 0C 0B 1C 2C 3C 4C 5C sB 6C 7C 8C"},Q:{"1":"1B"},R:{"1":"9C"},S:{"1":"AD BD"}},B:6,C:"TLS 1.1"};
@@ -0,0 +1 @@
{"name":"postcss-load-config","version":"3.1.4","files":{"LICENSE":{"checkedAt":1678883673217,"integrity":"sha512-NN6Es6hamJKMLu08deOXnxeZihWZFLePY4E6L0H/eMo2FWp9+Txp3/fb4rSJvKoo6p59PYfPdnyl8s/y3WvJZA==","mode":420,"size":1105},"src/options.js":{"checkedAt":1678883673217,"integrity":"sha512-8izlTo04gqF92/GyS/DA1iaSHYui8W2Xo0P41Y2JF1PyHpUzVan8V+LlIaJYeEffOWuphTcMCh4Ygw5hQie0Vw==","mode":420,"size":1057},"src/plugins.js":{"checkedAt":1678883673217,"integrity":"sha512-UJUFNPHB9r96BCYbs8TSjYKDUIhAzIDJlrZK8sj3OuDxsxxq5YkKsXx3V8SFpmFci0CBP3YQm46M8iV9o5eKbw==","mode":420,"size":1859},"src/index.js":{"checkedAt":1678883673217,"integrity":"sha512-HdUYiILq1LqQR45HjjC10ElwRb07rctnvCOaiSf3j4upMwW4cirQuyL7pzASdcgzPy7VQj/7mPjT7cM4c9cuSw==","mode":420,"size":4282},"package.json":{"checkedAt":1678883673217,"integrity":"sha512-sNzx8HHQ5noJMvyGylLZWoXE4kMMns8J9tyYIn4taqM7/0UQOy4Lqnj7XmcSKb+16umJszoy5y3bEfSc5ZuMQA==","mode":420,"size":945},"src/req.js":{"checkedAt":1678883673217,"integrity":"sha512-grcLnZXu9DxFl7pD2M0vFc9HbYdDuhZQFMSSVFQfU520eo6VVt3bRsA3h4v2r17pdKJzRvbJdJmqeyMdvDfwXw==","mode":420,"size":289},"src/index.d.ts":{"checkedAt":1678883673219,"integrity":"sha512-lPX2AA5xOnd458xxfG+m03HUzRpuaYrGRZNUtuO9peaF1XasBCV51GxFDnHiAlo5y+eIGucTe45jRKpjS6AGOA==","mode":420,"size":2221},"README.md":{"checkedAt":1678883673219,"integrity":"sha512-kKxgk4333ZN7a6FMIEA6MefReudqUxKt/1uphenr52uZfUaV94l+NhsOskMEUCMC1BVFGjUBPQVs4d4uPxd77A==","mode":420,"size":10985}}}
@@ -0,0 +1,53 @@
import { Subject } from '../Subject';
import { Subscriber } from '../Subscriber';
import { Subscription } from '../Subscription';
import { Scheduler } from '../Scheduler';
import { TestMessage } from './TestMessage';
import { SubscriptionLog } from './SubscriptionLog';
import { SubscriptionLoggable } from './SubscriptionLoggable';
import { applyMixins } from '../util/applyMixins';
import { observeNotification } from '../Notification';

export class HotObservable<T> extends Subject<T> implements SubscriptionLoggable {
  public subscriptions: SubscriptionLog[] = [];
  scheduler: Scheduler;
  // @ts-ignore: Property has no initializer and is not definitely assigned
  logSubscribedFrame: () => number;
  // @ts-ignore: Property has no initializer and is not definitely assigned
  logUnsubscribedFrame: (index: number) => void;

  constructor(public messages: TestMessage[], scheduler: Scheduler) {
    super();
    this.scheduler = scheduler;
  }

  /** @internal */
  protected _subscribe(subscriber: Subscriber<any>): Subscription {
    const subject: HotObservable<T> = this;
    const index = subject.logSubscribedFrame();
    const subscription = new Subscription();
    subscription.add(
      new Subscription(() => {
        subject.logUnsubscribedFrame(index);
      })
    );
    subscription.add(super._subscribe(subscriber));
    return subscription;
  }

  setup() {
    const subject = this;
    const messagesLength = subject.messages.length;
    /* tslint:disable:no-var-keyword */
    for (let i = 0; i < messagesLength; i++) {
      (() => {
        const { notification, frame } = subject.messages[i];
        /* tslint:enable */
        subject.scheduler.schedule(() => {
          observeNotification(notification, subject);
        }, frame);
      })();
    }
  }
}
applyMixins(HotObservable, [SubscriptionLoggable]);
@@ -0,0 +1,203 @@
# Can I cache this? [](https://travis-ci.org/kornelski/http-cache-semantics)

`CachePolicy` tells when responses can be reused from a cache, taking into account [HTTP RFC 7234](http://httpwg.org/specs/rfc7234.html) rules for user agents and shared caches.
It also implements [RFC 5861](https://tools.ietf.org/html/rfc5861), adding support for `stale-if-error` and `stale-while-revalidate`.
It's aware of many tricky details such as the `Vary` header, proxy revalidation, and authenticated responses.

## Usage

Cacheability of an HTTP response depends on how it was requested, so both `request` and `response` are required to create the policy.

```js
const policy = new CachePolicy(request, response, options);

if (!policy.storable()) {
    // throw the response away, it's not usable at all
    return;
}

// Cache the data AND the policy object in your cache
// (this is pseudocode, roll your own cache (lru-cache package works))
letsPretendThisIsSomeCache.set(
    request.url,
    { policy, response },
    policy.timeToLive()
);
```

```js
// And later, when you receive a new request:
const { policy, response } = letsPretendThisIsSomeCache.get(newRequest.url);

// It's not enough that it exists in the cache, it has to match the new request, too:
if (policy && policy.satisfiesWithoutRevalidation(newRequest)) {
    // OK, the previous response can be used to respond to the `newRequest`.
    // Response headers have to be updated, e.g. to add Age and remove uncacheable headers.
    response.headers = policy.responseHeaders();
    return response;
}
```

It may be surprising, but it's not enough for an HTTP response to be [fresh](#yo-fresh) to satisfy a request. It may need to match request headers specified in `Vary`. Even a matching fresh response may still not be usable if the new request restricted cacheability, etc.

The key method is `satisfiesWithoutRevalidation(newRequest)`, which checks whether the `newRequest` is compatible with the original request and whether all caching conditions are met.

### Constructor options

Request and response must have a `headers` property with all header names in lower case. `url`, `status` and `method` are optional (defaults are any URL, status `200`, and `GET` method).

```js
const request = {
    url: '/',
    method: 'GET',
    headers: {
        accept: '*/*',
    },
};

const response = {
    status: 200,
    headers: {
        'cache-control': 'public, max-age=7234',
    },
};

const options = {
    shared: true,
    cacheHeuristic: 0.1,
    immutableMinTimeToLive: 24 * 3600 * 1000, // 24h
    ignoreCargoCult: false,
};
```

If `options.shared` is `true` (default), then the response is evaluated from a perspective of a shared cache (i.e. `private` is not cacheable and `s-maxage` is respected). If `options.shared` is `false`, then the response is evaluated from a perspective of a single-user cache (i.e. `private` is cacheable and `s-maxage` is ignored). `shared: true` is recommended for HTTP clients.

`options.cacheHeuristic` is a fraction of the response's age that is used as a fallback cache duration. The default is 0.1 (10%), e.g. if a file hasn't been modified for 100 days, it'll be cached for 100\*0.1 = 10 days.

`options.immutableMinTimeToLive` is a number of milliseconds to assume as the default time to cache responses with `Cache-Control: immutable`. Note that [per RFC](http://httpwg.org/http-extensions/immutable.html) these can become stale, so `max-age` still overrides the default.

If `options.ignoreCargoCult` is true, common anti-cache directives will be completely ignored if the non-standard `pre-check` and `post-check` directives are present. These two useless directives are most commonly found in bad StackOverflow answers and PHP's "session limiter" defaults.

### `storable()`

Returns `true` if the response can be stored in a cache. If it's `false` then you MUST NOT store either the request or the response.

### `satisfiesWithoutRevalidation(newRequest)`

This is the most important method. Use this method to check whether the cached response is still fresh in the context of the new request.

If it returns `true`, then the given `request` matches the original response this cache policy has been created with, and the response can be reused without contacting the server. Note that the old response can't be returned without being updated, see `responseHeaders()`.

If it returns `false`, then the response may not be matching at all (e.g. it's for a different URL or method), or may require to be refreshed first (see `revalidationHeaders()`).

### `responseHeaders()`

Returns updated, filtered set of response headers to return to clients receiving the cached response. This function is necessary, because proxies MUST always remove hop-by-hop headers (such as `TE` and `Connection`) and update response's `Age` to avoid doubling cache time.

```js
cachedResponse.headers = cachePolicy.responseHeaders(cachedResponse);
```

### `timeToLive()`

Returns approximate time in _milliseconds_ until the response becomes stale (i.e. not fresh).

After that time (when `timeToLive() <= 0`) the response might not be usable without revalidation. However, there are exceptions, e.g. a client can explicitly allow stale responses, so always check with `satisfiesWithoutRevalidation()`.
`stale-if-error` and `stale-while-revalidate` extend the time to live of the cache, so that it can still be used while stale.
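
For example, the TTL can be fed straight into whatever cache you use as the entry's expiry (a minimal sketch reusing the pseudocode cache from the examples above):

```js
// Hypothetical cache from the earlier examples; the TTL is only a hint —
// always re-check with satisfiesWithoutRevalidation() when reading.
const ttl = policy.timeToLive();
if (policy.storable() && ttl > 0) {
    letsPretendThisIsSomeCache.set(request.url, { policy, response }, ttl);
}
```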

### `toObject()`/`fromObject(json)`

Chances are you'll want to store the `CachePolicy` object along with the cached response. `obj = policy.toObject()` gives a plain JSON-serializable object. `policy = CachePolicy.fromObject(obj)` creates an instance from it.
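
A minimal sketch of such a round trip (assuming any JSON-based storage):

```js
// Serialize the policy next to the response when writing the cache entry
const entry = JSON.stringify({ policy: policy.toObject(), response });

// ...and rebuild the policy when reading the entry back
const stored = JSON.parse(entry);
const restoredPolicy = CachePolicy.fromObject(stored.policy);
```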

### Refreshing stale cache (revalidation)

When a cached response has expired, it can be made fresh again by making a request to the origin server. The server may respond with status 304 (Not Modified) without sending the response body again, saving bandwidth.

The following methods help perform the update efficiently and correctly.

#### `revalidationHeaders(newRequest)`

Returns updated, filtered set of request headers to send to the origin server to check if the cached response can be reused. These headers allow the origin server to return status 304 indicating the response is still fresh. All headers unrelated to caching are passed through as-is.

Use this method when updating cache from the origin server.

```js
updateRequest.headers = cachePolicy.revalidationHeaders(updateRequest);
```

#### `revalidatedPolicy(revalidationRequest, revalidationResponse)`

Use this method to update the cache after receiving a new response from the origin server. It returns an object with two keys:

- `policy` — A new `CachePolicy` with HTTP headers updated from `revalidationResponse`. You can always replace the old cached `CachePolicy` with the new one.
- `modified` — Boolean indicating whether the response body has changed.
    - If `false`, then a valid 304 Not Modified response has been received, and you can reuse the old cached response body. This is also affected by `stale-if-error`.
    - If `true`, you should use new response's body (if present), or make another request to the origin server without any conditional headers (i.e. don't use `revalidationHeaders()` this time) to get the new resource.

```js
// When serving requests from cache:
const { policy: oldPolicy, response: oldResponse } = letsPretendThisIsSomeCache.get(
    newRequest.url
);

if (!oldPolicy.satisfiesWithoutRevalidation(newRequest)) {
    // Change the request to ask the origin server if the cached response can be used
    newRequest.headers = oldPolicy.revalidationHeaders(newRequest);

    // Send request to the origin server. The server may respond with status 304
    const newResponse = await makeRequest(newRequest);

    // Create updated policy and combined response from the old and new data
    const { policy, modified } = oldPolicy.revalidatedPolicy(
        newRequest,
        newResponse
    );
    const response = modified ? newResponse : oldResponse;

    // Update the cache with the newer/fresher response
    letsPretendThisIsSomeCache.set(
        newRequest.url,
        { policy, response },
        policy.timeToLive()
    );

    // And proceed returning cached response as usual
    response.headers = policy.responseHeaders();
    return response;
}
```

# Yo, FRESH

![satisfiesWithoutRevalidation](fresh.jpg)

## Used by

- [ImageOptim API](https://imageoptim.com/api), [make-fetch-happen](https://github.com/zkat/make-fetch-happen), [cacheable-request](https://www.npmjs.com/package/cacheable-request) ([got](https://www.npmjs.com/package/got)), [npm/registry-fetch](https://github.com/npm/registry-fetch), [etc.](https://github.com/kornelski/http-cache-semantics/network/dependents)

## Implemented

- `Cache-Control` response header with all the quirks.
- `Expires` with check for bad clocks.
- `Pragma` response header.
- `Age` response header.
- `Vary` response header.
- Default cacheability of statuses and methods.
- Requests for stale data.
- Filtering of hop-by-hop headers.
- Basic revalidation request
- `stale-if-error`

## Unimplemented

- Merging of range requests, `If-Range` (but correctly supports them as non-cacheable)
- Revalidation of multiple representations

### Trusting server `Date`

Per the RFC, the cache should take into account the time between the server-supplied `Date` and the time it received the response. The RFC-mandated behavior creates two problems:

* Servers with an incorrectly set timezone may add several hours to the cache age (or more, if the clock is completely wrong).
* Even reasonably correct clocks may be off by a couple of seconds, breaking the `max-age=1` trick (which is useful for reverse proxies on high-traffic servers).

Previous versions of this library had an option to ignore the server date if it was "too inaccurate". To support the `max-age=1` trick the library also has to ignore dates that are pretty accurate. There's no point in having an option to trust dates that are only a bit inaccurate, so this library won't trust any server dates. `max-age` will be interpreted from the time the response has been received, not from when it has been sent. This will affect only [RFC 1149 networks](https://tools.ietf.org/html/rfc1149).
@@ -0,0 +1,3 @@
export type CreateResetToken = {
  email: string;
};
@@ -0,0 +1,64 @@
declare namespace onetime {
	interface Options {
		/**
		Throw an error when called more than once.

		@default false
		*/
		throw?: boolean;
	}
}

declare const onetime: {
	/**
	Ensure a function is only called once. When called multiple times it will return the return value from the first call.

	@param fn - Function that should only be called once.
	@returns A function that only calls `fn` once.

	@example
	```
	import onetime = require('onetime');

	let i = 0;

	const foo = onetime(() => ++i);

	foo(); //=> 1
	foo(); //=> 1
	foo(); //=> 1

	onetime.callCount(foo); //=> 3
	```
	*/
	<ArgumentsType extends unknown[], ReturnType>(
		fn: (...arguments: ArgumentsType) => ReturnType,
		options?: onetime.Options
	): (...arguments: ArgumentsType) => ReturnType;

	/**
	Get the number of times `fn` has been called.

	@param fn - Function to get call count from.
	@returns A number representing how many times `fn` has been called.

	@example
	```
	import onetime = require('onetime');

	const foo = onetime(() => {});
	foo();
	foo();
	foo();

	console.log(onetime.callCount(foo));
	//=> 3
	```
	*/
	callCount(fn: (...arguments: any[]) => unknown): number;

	// TODO: Remove this for the next major release
	default: typeof onetime;
};

export = onetime;
@@ -0,0 +1,59 @@
var baseFindIndex = require('./_baseFindIndex'),
    baseIteratee = require('./_baseIteratee'),
    toInteger = require('./toInteger');

/* Built-in method references for those with the same name as other `lodash` methods. */
var nativeMax = Math.max,
    nativeMin = Math.min;

/**
 * This method is like `_.findIndex` except that it iterates over elements
 * of `collection` from right to left.
 *
 * @static
 * @memberOf _
 * @since 2.0.0
 * @category Array
 * @param {Array} array The array to inspect.
 * @param {Function} [predicate=_.identity] The function invoked per iteration.
 * @param {number} [fromIndex=array.length-1] The index to search from.
 * @returns {number} Returns the index of the found element, else `-1`.
 * @example
 *
 * var users = [
 *   { 'user': 'barney', 'active': true },
 *   { 'user': 'fred', 'active': false },
 *   { 'user': 'pebbles', 'active': false }
 * ];
 *
 * _.findLastIndex(users, function(o) { return o.user == 'pebbles'; });
 * // => 2
 *
 * // The `_.matches` iteratee shorthand.
 * _.findLastIndex(users, { 'user': 'barney', 'active': true });
 * // => 0
 *
 * // The `_.matchesProperty` iteratee shorthand.
 * _.findLastIndex(users, ['active', false]);
 * // => 2
 *
 * // The `_.property` iteratee shorthand.
 * _.findLastIndex(users, 'active');
 * // => 0
 */
function findLastIndex(array, predicate, fromIndex) {
  var length = array == null ? 0 : array.length;
  if (!length) {
    return -1;
  }
  var index = length - 1;
  if (fromIndex !== undefined) {
    index = toInteger(fromIndex);
    index = fromIndex < 0
      ? nativeMax(length + index, 0)
      : nativeMin(index, length - 1);
  }
  return baseFindIndex(array, baseIteratee(predicate, 3), index, true);
}

module.exports = findLastIndex;
@@ -0,0 +1,9 @@
import { operate } from '../util/lift';
import { createOperatorSubscriber } from './OperatorSubscriber';
import { noop } from '../util/noop';
export function ignoreElements() {
    return operate((source, subscriber) => {
        source.subscribe(createOperatorSubscriber(subscriber, noop));
    });
}
//# sourceMappingURL=ignoreElements.js.map
@@ -0,0 +1 @@
module.exports={A:{A:{"1":"F A B","2":"CC","8":"J D E"},B:{"1":"C K L G M N O P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H"},C:{"1":"0 1 2 3 4 5 6 7 8 9 I v J D E F A B C K L G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB uB ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB EC FC","8":"DC tB"},D:{"1":"0 1 2 3 4 5 6 7 8 9 I v J D E F A B C K L G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB uB ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB GC"},E:{"1":"I v J D E F A B C K L G IC JC KC LC 0B qB rB 1B MC NC 2B 3B 4B 5B sB 6B 7B 8B 9B OC","8":"HC zB"},F:{"1":"0 1 2 3 4 5 6 7 8 9 B C G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e RC SC qB AC TC rB","8":"F PC QC"},G:{"1":"E zB UC BC VC WC XC YC ZC aC bC cC dC eC fC gC hC iC jC kC lC mC nC 2B 3B 4B 5B sB 6B 7B 8B 9B"},H:{"2":"oC"},I:{"1":"tB I f pC qC rC sC BC tC uC"},J:{"1":"D A"},K:{"1":"B C h qB AC rB","8":"A"},L:{"1":"H"},M:{"1":"H"},N:{"1":"A B"},O:{"1":"vC"},P:{"1":"I g wC xC yC zC 0C 0B 1C 2C 3C 4C 5C sB 6C 7C 8C"},Q:{"1":"1B"},R:{"1":"9C"},S:{"1":"AD BD"}},B:1,C:"Text API for Canvas"};
@@ -0,0 +1,13 @@
/** prettier */
import { Observable } from '../Observable';
import { isFunction } from './isFunction';

/**
 * Tests to see if the object is an RxJS {@link Observable}
 * @param obj the object to test
 */
export function isObservable(obj: any): obj is Observable<unknown> {
  // The !! is to ensure that this publicly exposed function returns
  // `false` if something like `null` or `0` is passed.
  return !!obj && (obj instanceof Observable || (isFunction(obj.lift) && isFunction(obj.subscribe)));
}
@@ -0,0 +1,6 @@

/**
 * For Node.js, simply re-export the core `util.deprecate` function.
 */

module.exports = require('util').deprecate;
@@ -0,0 +1,169 @@
import type { FormDataEncoderHeaders } from "./util/Headers.js";
import type { FormDataLike } from "./FormDataLike.js";
import type { FileLike } from "./FileLike.js";
export interface FormDataEncoderOptions {
    /**
     * When enabled, the encoder will emit additional per part headers, such as `Content-Length`.
     *
     * Please note that the web clients do not include these, so when enabled this option might cause an error if `multipart/form-data` does not consider additional headers.
     *
     * Defaults to `false`.
     */
    enableAdditionalHeaders?: boolean;
}
/**
 * Implements [`multipart/form-data` encoding algorithm](https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#multipart/form-data-encoding-algorithm),
 * allowing to add support for spec-compliant [`FormData`](https://developer.mozilla.org/en-US/docs/Web/API/FormData) to an HTTP client.
 */
export declare class FormDataEncoder {
    #private;
    /**
     * Returns boundary string
     */
    readonly boundary: string;
    /**
     * Returns Content-Type header
     */
    readonly contentType: string;
    /**
     * Returns Content-Length header
     */
    readonly contentLength: string | undefined;
    /**
     * Returns headers object with Content-Type and Content-Length header
     */
    readonly headers: Readonly<FormDataEncoderHeaders>;
    /**
     * Creates a multipart/form-data encoder.
     *
     * @param form FormData object to encode. This object must be a spec-compatible FormData implementation.
     *
     * @example
     *
     * ```js
     * import {Readable} from "stream"
     *
     * import {FormData, File, fileFromPath} from "formdata-node"
     * import {FormDataEncoder} from "form-data-encoder"
     *
     * import fetch from "node-fetch"
     *
     * const form = new FormData()
     *
     * form.set("field", "Just a random string")
     * form.set("file", new File(["Using files is class amazing"], "file.txt"))
     * form.set("fileFromPath", await fileFromPath("path/to/a/file.txt"))
     *
     * const encoder = new FormDataEncoder(form)
     *
     * const options = {
     *   method: "post",
     *   headers: encoder.headers,
     *   body: Readable.from(encoder)
     * }
     *
     * const response = await fetch("https://httpbin.org/post", options)
     *
     * console.log(await response.json())
     * ```
     */
    constructor(form: FormDataLike);
    /**
     * Creates multipart/form-data encoder with custom boundary string.
     *
     * @param form FormData object to encode. This object must be a spec-compatible FormData implementation.
     * @param boundary An optional boundary string that will be used by the encoder. If no boundary string is present, the encoder will generate one automatically.
     */
    constructor(form: FormDataLike, boundary: string);
    /**
     * Creates multipart/form-data encoder with additional options.
     *
     * @param form FormData object to encode. This object must be a spec-compatible FormData implementation.
     * @param options Additional options
     */
    constructor(form: FormDataLike, options: FormDataEncoderOptions);
    constructor(form: FormDataLike, boundary: string, options?: FormDataEncoderOptions);
    /**
     * Returns form-data content length
     *
     * @deprecated Use FormDataEncoder.contentLength or FormDataEncoder.headers["Content-Length"] instead
     */
    getContentLength(): number | undefined;
    /**
     * Creates an iterator allowing to go through form-data parts (with metadata).
     * This method **will not** read the files.
     *
     * Using this method, you can convert form-data content into Blob:
     *
     * @example
     *
     * import {Readable} from "stream"
     *
     * import {FormDataEncoder} from "form-data-encoder"
     *
     * import {FormData} from "formdata-polyfill/esm-min.js"
     * import {fileFrom} from "fetch-blob/form.js"
     * import {File} from "fetch-blob/file.js"
     * import {Blob} from "fetch-blob"
     *
     * import fetch from "node-fetch"
     *
     * const form = new FormData()
     *
     * form.set("field", "Just a random string")
     * form.set("file", new File(["Using files is class amazing"]))
     * form.set("fileFromPath", await fileFrom("path/to/a/file.txt"))
     *
     * const encoder = new FormDataEncoder(form)
     *
     * const options = {
     *   method: "post",
     *   body: new Blob(encoder, {type: encoder.contentType})
     * }
     *
     * const response = await fetch("https://httpbin.org/post", options)
     *
     * console.log(await response.json())
     */
    values(): Generator<Uint8Array | FileLike, void, undefined>;
    /**
     * Creates an async iterator allowing to perform the encoding by portions.
     * This method **will** also read files.
     *
     * @example
     *
     * import {Readable} from "stream"
     *
     * import {FormData, File, fileFromPath} from "formdata-node"
     * import {FormDataEncoder} from "form-data-encoder"
     *
     * import fetch from "node-fetch"
     *
     * const form = new FormData()
     *
     * form.set("field", "Just a random string")
     * form.set("file", new File(["Using files is class amazing"], "file.txt"))
     * form.set("fileFromPath", await fileFromPath("path/to/a/file.txt"))
     *
     * const encoder = new FormDataEncoder(form)
     *
     * const options = {
     *   method: "post",
     *   headers: encoder.headers,
     *   body: Readable.from(encoder.encode()) // or Readable.from(encoder)
     * }
     *
     * const response = await fetch("https://httpbin.org/post", options)
     *
     * console.log(await response.json())
     */
    encode(): AsyncGenerator<Uint8Array, void, undefined>;
    /**
     * Creates an iterator allowing to read through the encoder data using for...of loops
     */
    [Symbol.iterator](): Generator<Uint8Array | FileLike, void, undefined>;
    /**
     * Creates an **async** iterator allowing to read through the encoder data using for-await...of loops
     */
    [Symbol.asyncIterator](): AsyncGenerator<Uint8Array, void, undefined>;
}
@@ -0,0 +1,263 @@
"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
function _export(target, all) {
    for(var name in all)Object.defineProperty(target, name, {
        enumerable: true,
        get: all[name]
    });
}
_export(exports, {
    normalize: ()=>normalize,
    url: ()=>url,
    number: ()=>number,
    percentage: ()=>percentage,
    length: ()=>length,
    lineWidth: ()=>lineWidth,
    shadow: ()=>shadow,
    color: ()=>color,
    image: ()=>image,
    gradient: ()=>gradient,
    position: ()=>position,
    familyName: ()=>familyName,
    genericName: ()=>genericName,
    absoluteSize: ()=>absoluteSize,
    relativeSize: ()=>relativeSize
});
const _color = require("./color");
const _parseBoxShadowValue = require("./parseBoxShadowValue");
const _splitAtTopLevelOnly = require("./splitAtTopLevelOnly");
let cssFunctions = [
    "min",
    "max",
    "clamp",
    "calc"
];
// Ref: https://developer.mozilla.org/en-US/docs/Web/CSS/CSS_Types
function isCSSFunction(value) {
    return cssFunctions.some((fn)=>new RegExp(`^${fn}\\(.*\\)`).test(value));
}
const placeholder = "--tw-placeholder";
const placeholderRe = new RegExp(placeholder, "g");
function normalize(value, isRoot = true) {
    // Keep raw strings if it starts with `url(`
    if (value.includes("url(")) {
        return value.split(/(url\(.*?\))/g).filter(Boolean).map((part)=>{
            if (/^url\(.*?\)$/.test(part)) {
                return part;
            }
            return normalize(part, false);
        }).join("");
    }
    // Convert `_` to ` `, except for escaped underscores `\_`
    value = value.replace(/([^\\])_+/g, (fullMatch, characterBefore)=>characterBefore + " ".repeat(fullMatch.length - 1)).replace(/^_/g, " ").replace(/\\_/g, "_");
    // Remove leftover whitespace
    if (isRoot) {
        value = value.trim();
    }
    // Add spaces around operators inside math functions like calc() that do not follow an operator
    // or '('.
    value = value.replace(/(calc|min|max|clamp)\(.+\)/g, (match)=>{
        let vars = [];
        return match.replace(/var\((--.+?)[,)]/g, (match, g1)=>{
            vars.push(g1);
            return match.replace(g1, placeholder);
        }).replace(/(-?\d*\.?\d(?!\b-\d.+[,)](?![^+\-/*])\D)(?:%|[a-z]+)?|\))([+\-/*])/g, "$1 $2 ").replace(placeholderRe, ()=>vars.shift());
    });
    return value;
}
function url(value) {
    return value.startsWith("url(");
}
function number(value) {
    return !isNaN(Number(value)) || isCSSFunction(value);
}
function percentage(value) {
    return value.endsWith("%") && number(value.slice(0, -1)) || isCSSFunction(value);
}
// Please refer to MDN when updating this list:
// https://developer.mozilla.org/en-US/docs/Learn/CSS/Building_blocks/Values_and_units
// https://developer.mozilla.org/en-US/docs/Web/CSS/CSS_Container_Queries#container_query_length_units
let lengthUnits = [
    "cm",
    "mm",
    "Q",
    "in",
    "pc",
    "pt",
    "px",
    "em",
    "ex",
    "ch",
    "rem",
    "lh",
    "rlh",
    "vw",
    "vh",
    "vmin",
    "vmax",
    "vb",
    "vi",
    "svw",
    "svh",
    "lvw",
    "lvh",
    "dvw",
    "dvh",
    "cqw",
    "cqh",
    "cqi",
    "cqb",
    "cqmin",
    "cqmax"
];
let lengthUnitsPattern = `(?:${lengthUnits.join("|")})`;
function length(value) {
    return value === "0" || new RegExp(`^[+-]?[0-9]*\.?[0-9]+(?:[eE][+-]?[0-9]+)?${lengthUnitsPattern}$`).test(value) || isCSSFunction(value);
}
let lineWidths = new Set([
    "thin",
    "medium",
    "thick"
]);
function lineWidth(value) {
    return lineWidths.has(value);
}
function shadow(value) {
    let parsedShadows = (0, _parseBoxShadowValue.parseBoxShadowValue)(normalize(value));
    for (let parsedShadow of parsedShadows){
        if (!parsedShadow.valid) {
            return false;
        }
    }
    return true;
}
function color(value) {
    let colors = 0;
    let result = (0, _splitAtTopLevelOnly.splitAtTopLevelOnly)(value, "_").every((part)=>{
        part = normalize(part);
        if (part.startsWith("var(")) return true;
        if ((0, _color.parseColor)(part, {
            loose: true
        }) !== null) return colors++, true;
        return false;
    });
    if (!result) return false;
    return colors > 0;
}
function image(value) {
    let images = 0;
    let result = (0, _splitAtTopLevelOnly.splitAtTopLevelOnly)(value, ",").every((part)=>{
        part = normalize(part);
        if (part.startsWith("var(")) return true;
        if (url(part) || gradient(part) || [
            "element(",
            "image(",
            "cross-fade(",
            "image-set("
        ].some((fn)=>part.startsWith(fn))) {
            images++;
            return true;
        }
        return false;
    });
    if (!result) return false;
    return images > 0;
}
let gradientTypes = new Set([
    "linear-gradient",
    "radial-gradient",
    "repeating-linear-gradient",
    "repeating-radial-gradient",
    "conic-gradient"
]);
function gradient(value) {
    value = normalize(value);
    for (let type of gradientTypes){
        if (value.startsWith(`${type}(`)) {
            return true;
        }
    }
    return false;
}
let validPositions = new Set([
    "center",
    "top",
    "right",
    "bottom",
    "left"
]);
function position(value) {
    let positions = 0;
    let result = (0, _splitAtTopLevelOnly.splitAtTopLevelOnly)(value, "_").every((part)=>{
        part = normalize(part);
        if (part.startsWith("var(")) return true;
        if (validPositions.has(part) || length(part) || percentage(part)) {
            positions++;
            return true;
        }
        return false;
    });
    if (!result) return false;
    return positions > 0;
}
function familyName(value) {
    let fonts = 0;
    let result = (0, _splitAtTopLevelOnly.splitAtTopLevelOnly)(value, ",").every((part)=>{
        part = normalize(part);
        if (part.startsWith("var(")) return true;
        // If it contains spaces, then it should be quoted
        if (part.includes(" ")) {
            if (!/(['"])([^"']+)\1/g.test(part)) {
                return false;
            }
        }
        // If it starts with a number, it's invalid
        if (/^\d/g.test(part)) {
            return false;
        }
        fonts++;
        return true;
    });
    if (!result) return false;
    return fonts > 0;
}
let genericNames = new Set([
    "serif",
    "sans-serif",
    "monospace",
    "cursive",
    "fantasy",
    "system-ui",
    "ui-serif",
    "ui-sans-serif",
    "ui-monospace",
    "ui-rounded",
    "math",
    "emoji",
    "fangsong"
]);
function genericName(value) {
    return genericNames.has(value);
}
|
||||
"xx-small",
|
||||
"x-small",
|
||||
"small",
|
||||
"medium",
|
||||
"large",
|
||||
"x-large",
|
||||
"x-large",
|
||||
"xxx-large"
|
||||
]);
|
||||
function absoluteSize(value) {
|
||||
return absoluteSizes.has(value);
|
||||
}
|
||||
let relativeSizes = new Set([
|
||||
"larger",
|
||||
"smaller"
|
||||
]);
|
||||
function relativeSize(value) {
|
||||
return relativeSizes.has(value);
|
||||
}
|
||||
@@ -0,0 +1 @@
module.exports={A:{A:{"2":"J D E F A B CC"},B:{"1":"P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H","2":"C K L G M N O"},C:{"1":"SB TB UB VB WB XB YB uB ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB","2":"0 1 2 3 4 5 6 7 8 9 DC tB I v J D E F A B C K L G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB EC FC"},D:{"1":"ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB GC","2":"0 1 2 3 4 5 6 7 8 9 I v J D E F A B C K L G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB","194":"uB"},E:{"1":"B C K L G 0B qB rB 1B MC NC 2B 3B 4B 5B sB 6B 7B 8B 9B OC","2":"I v J D E F A HC zB IC JC KC LC"},F:{"1":"NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e","2":"0 1 2 3 4 5 6 7 8 9 F B C G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB PC QC RC SC qB AC TC rB","194":"MB"},G:{"1":"cC dC eC fC gC hC iC jC kC lC mC nC 2B 3B 4B 5B sB 6B 7B 8B 9B","2":"E zB UC BC VC WC XC YC ZC aC bC"},H:{"2":"oC"},I:{"1":"f","2":"tB I pC qC rC sC BC tC uC"},J:{"2":"D A"},K:{"1":"h","2":"A B C qB AC rB"},L:{"1":"H"},M:{"1":"H"},N:{"2":"A B"},O:{"1":"vC"},P:{"1":"g zC 0C 0B 1C 2C 3C 4C 5C sB 6C 7C 8C","2":"I wC xC yC"},Q:{"1":"1B"},R:{"1":"9C"},S:{"1":"BD","2":"AD"}},B:7,C:":focus-within CSS pseudo-class"};
|
||||
@@ -0,0 +1,17 @@
|
||||
import {fileURLToPath} from 'node:url';
|
||||
import {Transform} from 'node:stream';
|
||||
|
||||
export const toPath = urlOrPath => urlOrPath instanceof URL ? fileURLToPath(urlOrPath) : urlOrPath;
|
||||
|
||||
export class FilterStream extends Transform {
|
||||
constructor(filter) {
|
||||
super({
|
||||
objectMode: true,
|
||||
transform(data, encoding, callback) {
|
||||
callback(undefined, filter(data) ? data : undefined);
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export const isNegativePattern = pattern => pattern[0] === '!';
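The three helpers above are small; a short sketch shows their behaviour (the import path and the POSIX output are assumptions for illustration):

```js
// Illustration only; the file name below is assumed.
import {toPath, isNegativePattern} from './utilities.js';

toPath(new URL('file:///tmp/readme.md')); // '/tmp/readme.md' on POSIX
toPath('already/a/path');                 // returned unchanged, since it is not a URL
isNegativePattern('!node_modules/**');    // true
isNegativePattern('src/**/*.js');         // false
```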
|
||||
@@ -0,0 +1,3 @@
|
||||
export declare const reserved: Set<string>;
|
||||
export declare function is_valid(str: string): boolean;
|
||||
export declare function sanitize(name: string): string;
|
||||
@@ -0,0 +1 @@
|
||||
module.exports={A:{A:{"2":"J D E F A B CC"},B:{"2":"C K L G M N O P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H"},C:{"2":"0 1 2 3 4 5 6 7 8 9 DC tB I v J D E F A B C K L G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB uB ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB EC FC"},D:{"2":"3 4 5 6 7 8 9 I v J D E F A B C K L G M N O w AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB uB ZB vB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R S T U V W X Y Z a b c d e i j k l m n o p q r s t u f H xB yB GC","66":"0 1 2 g x y z"},E:{"2":"I v J E F A B C K L G HC zB IC JC LC 0B qB rB 1B MC NC 2B 3B 4B 5B sB 6B 7B 8B 9B OC","130":"D KC"},F:{"2":"0 1 2 3 4 5 6 7 8 9 F B C G M N O w g x y z AB BB CB DB EB FB GB HB IB JB KB LB MB NB OB PB QB RB SB TB UB VB WB XB YB ZB aB bB cB dB eB fB gB hB iB jB kB h lB mB nB oB pB P Q R wB S T U V W X Y Z a b c d e PC QC RC SC qB AC TC rB"},G:{"2":"E zB UC BC VC WC YC ZC aC bC cC dC eC fC gC hC iC jC kC lC mC nC 2B 3B 4B 5B sB 6B 7B 8B 9B","130":"XC"},H:{"2":"oC"},I:{"2":"tB I f pC qC rC sC BC tC uC"},J:{"2":"D A"},K:{"2":"A B C h qB AC rB"},L:{"2":"H"},M:{"2":"H"},N:{"2":"A B"},O:{"2":"vC"},P:{"2":"I g wC xC yC zC 0C 0B 1C 2C 3C 4C 5C sB 6C 7C 8C"},Q:{"2":"1B"},R:{"2":"9C"},S:{"2":"AD BD"}},B:7,C:"seamless attribute for iframes"};
|
||||
@@ -0,0 +1,15 @@
|
||||
'use strict';
|
||||
|
||||
var callBound = require('call-bind/callBound');
|
||||
var $replace = callBound('String.prototype.replace');
|
||||
|
||||
var mvsIsWS = (/^\s$/).test('\u180E');
|
||||
/* eslint-disable no-control-regex */
|
||||
var endWhitespace = mvsIsWS
|
||||
? /[\x09\x0A\x0B\x0C\x0D\x20\xA0\u1680\u180E\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200A\u202F\u205F\u3000\u2028\u2029\uFEFF]+$/
|
||||
: /[\x09\x0A\x0B\x0C\x0D\x20\xA0\u1680\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200A\u202F\u205F\u3000\u2028\u2029\uFEFF]+$/;
|
||||
/* eslint-enable no-control-regex */
|
||||
|
||||
module.exports = function trimEnd() {
|
||||
return $replace(this, endWhitespace, '');
|
||||
};
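The exported function expects the target string as `this` (the usual es-shims pattern is to bind it via `.call` or a call-bound wrapper). A hedged sketch, with the require path assumed since the file is unnamed here:

```js
// Sketch only; './implementation' is an assumed path for the file above.
var trimEnd = require('./implementation');

trimEnd.call('hello \t\n ');  // 'hello'
trimEnd.call('keep\u3000');   // 'keep' (ideographic space is trailing whitespace)
```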
|
||||
@@ -0,0 +1,301 @@
|
||||
# Change Log
|
||||
|
||||
## 0.5.6
|
||||
|
||||
* Fix for regression when people were using numbers as names in source maps. See
|
||||
#236.
|
||||
|
||||
## 0.5.5
|
||||
|
||||
* Fix "regression" of unsupported, implementation behavior that half the world
|
||||
happens to have come to depend on. See #235.
|
||||
|
||||
* Fix regression involving function hoisting in SpiderMonkey. See #233.
|
||||
|
||||
## 0.5.4
|
||||
|
||||
* Large performance improvements to source-map serialization. See #228 and #229.
|
||||
|
||||
## 0.5.3
|
||||
|
||||
* Do not include unnecessary distribution files. See
|
||||
commit ef7006f8d1647e0a83fdc60f04f5a7ca54886f86.
|
||||
|
||||
## 0.5.2
|
||||
|
||||
* Include browser distributions of the library in package.json's `files`. See
|
||||
issue #212.
|
||||
|
||||
## 0.5.1
|
||||
|
||||
* Fix latent bugs in IndexedSourceMapConsumer.prototype._parseMappings. See
|
||||
ff05274becc9e6e1295ed60f3ea090d31d843379.
|
||||
|
||||
## 0.5.0
|
||||
|
||||
* Node 0.8 is no longer supported.
|
||||
|
||||
* Use webpack instead of dryice for bundling.
|
||||
|
||||
* Big speedups serializing source maps. See pull request #203.
|
||||
|
||||
* Fix a bug with `SourceMapConsumer.prototype.sourceContentFor` and sources that
|
||||
explicitly start with the source root. See issue #199.
|
||||
|
||||
## 0.4.4
|
||||
|
||||
* Fix an issue where using a `SourceMapGenerator` after having created a
|
||||
`SourceMapConsumer` from it via `SourceMapConsumer.fromSourceMap` failed. See
|
||||
issue #191.
|
||||
|
||||
* Fix an issue where `SourceMapGenerator` would mistakenly consider
|
||||
different mappings as duplicates of each other and avoid generating them. See
|
||||
issue #192.
|
||||
|
||||
## 0.4.3
|
||||
|
||||
* A very large number of performance improvements, particularly when parsing
|
||||
source maps. Collectively about 75% of time shaved off of the source map
|
||||
parsing benchmark!
|
||||
|
||||
* Fix a bug in `SourceMapConsumer.prototype.allGeneratedPositionsFor` and fuzzy
|
||||
searching in the presence of a column option. See issue #177.
|
||||
|
||||
* Fix a bug with joining a source and its source root when the source is above
|
||||
the root. See issue #182.
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.hasContentsOfAllSources` method to
|
||||
determine when all sources' contents are inlined into the source map. See
|
||||
issue #190.
|
||||
|
||||
## 0.4.2
|
||||
|
||||
* Add an `.npmignore` file so that the benchmarks aren't pulled down by
|
||||
dependent projects. Issue #169.
|
||||
|
||||
* Add an optional `column` argument to
|
||||
`SourceMapConsumer.prototype.allGeneratedPositionsFor` and better handle lines
|
||||
with no mappings. Issues #172 and #173.
|
||||
|
||||
## 0.4.1
|
||||
|
||||
* Fix accidentally defining a global variable. #170.
|
||||
|
||||
## 0.4.0
|
||||
|
||||
* The default direction for fuzzy searching was changed back to its original
|
||||
direction. See #164.
|
||||
|
||||
* There is now a `bias` option you can supply to `SourceMapConsumer` to control
|
||||
the fuzzy searching direction. See #167.
|
||||
|
||||
* About an 8% speed up in parsing source maps. See #159.
|
||||
|
||||
* Added a benchmark for parsing and generating source maps.
|
||||
|
||||
## 0.3.0
|
||||
|
||||
* Change the default direction that searching for positions fuzzes when there is
|
||||
not an exact match. See #154.
|
||||
|
||||
* Support for environments using json2.js for JSON serialization. See #156.
|
||||
|
||||
## 0.2.0
|
||||
|
||||
* Support for consuming "indexed" source maps which do not have any remote
|
||||
sections. See pull request #127. This introduces a minor backwards
|
||||
incompatibility if you are monkey patching `SourceMapConsumer.prototype`
|
||||
methods.
|
||||
|
||||
## 0.1.43
|
||||
|
||||
* Performance improvements for `SourceMapGenerator` and `SourceNode`. See issue
|
||||
#148 for some discussion and issues #150, #151, and #152 for implementations.
|
||||
|
||||
## 0.1.42
|
||||
|
||||
* Fix an issue where `SourceNode`s from different versions of the source-map
|
||||
library couldn't be used in conjunction with each other. See issue #142.
|
||||
|
||||
## 0.1.41
|
||||
|
||||
* Fix a bug with getting the source content of relative sources with a "./"
|
||||
prefix. See issue #145 and [Bug 1090768](bugzil.la/1090768).
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.computeColumnSpans` method to compute the
|
||||
column span of each mapping.
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.allGeneratedPositionsFor` method to find
|
||||
all generated positions associated with a given original source and line.
|
||||
|
||||
## 0.1.40
|
||||
|
||||
* Performance improvements for parsing source maps in SourceMapConsumer.
|
||||
|
||||
## 0.1.39
|
||||
|
||||
* Fix a bug where setting a source's contents to null before any source content
|
||||
had been set threw a TypeError. See issue #131.
|
||||
|
||||
## 0.1.38
|
||||
|
||||
* Fix a bug where finding relative paths from an empty path was creating
|
||||
absolute paths. See issue #129.
|
||||
|
||||
## 0.1.37
|
||||
|
||||
* Fix a bug where if the source root was an empty string, relative source paths
|
||||
would turn into absolute source paths. Issue #124.
|
||||
|
||||
## 0.1.36
|
||||
|
||||
* Allow the `names` mapping property to be an empty string. Issue #121.
|
||||
|
||||
## 0.1.35
|
||||
|
||||
* A third optional parameter was added to `SourceNode.fromStringWithSourceMap`
|
||||
to specify a path that relative sources in the second parameter should be
|
||||
relative to. Issue #105.
|
||||
|
||||
* If no file property is given to a `SourceMapGenerator`, then the resulting
|
||||
source map will no longer have a `null` file property. The property will
|
||||
simply not exist. Issue #104.
|
||||
|
||||
* Fixed a bug where consecutive newlines were ignored in `SourceNode`s.
|
||||
Issue #116.
|
||||
|
||||
## 0.1.34
|
||||
|
||||
* Make `SourceNode` work with windows style ("\r\n") newlines. Issue #103.
|
||||
|
||||
* Fix bug involving source contents and the
|
||||
`SourceMapGenerator.prototype.applySourceMap`. Issue #100.
|
||||
|
||||
## 0.1.33
|
||||
|
||||
* Fix some edge cases surrounding path joining and URL resolution.
|
||||
|
||||
* Add a third parameter for relative path to
|
||||
`SourceMapGenerator.prototype.applySourceMap`.
|
||||
|
||||
* Fix issues with mappings and EOLs.
|
||||
|
||||
## 0.1.32
|
||||
|
||||
* Fixed a bug where SourceMapConsumer couldn't handle negative relative columns
|
||||
(issue 92).
|
||||
|
||||
* Fixed test runner to actually report number of failed tests as its process
|
||||
exit code.
|
||||
|
||||
* Fixed a typo when reporting bad mappings (issue 87).
|
||||
|
||||
## 0.1.31
|
||||
|
||||
* Delay parsing the mappings in SourceMapConsumer until queried for a source
|
||||
location.
|
||||
|
||||
* Support Sass source maps (which at the time of writing deviate from the spec
|
||||
in small ways) in SourceMapConsumer.
|
||||
|
||||
## 0.1.30
|
||||
|
||||
* Do not join source root with a source, when the source is a data URI.
|
||||
|
||||
* Extend the test runner to allow running single specific test files at a time.
|
||||
|
||||
* Performance improvements in `SourceNode.prototype.walk` and
|
||||
`SourceMapConsumer.prototype.eachMapping`.
|
||||
|
||||
* Source map browser builds will now work inside Workers.
|
||||
|
||||
* Better error messages when attempting to add an invalid mapping to a
|
||||
`SourceMapGenerator`.
|
||||
|
||||
## 0.1.29
|
||||
|
||||
* Allow duplicate entries in the `names` and `sources` arrays of source maps
|
||||
(usually from TypeScript) we are parsing. Fixes github issue 72.
|
||||
|
||||
## 0.1.28
|
||||
|
||||
* Skip duplicate mappings when creating source maps from SourceNode; github
|
||||
issue 75.
|
||||
|
||||
## 0.1.27
|
||||
|
||||
* Don't throw an error when the `file` property is missing in SourceMapConsumer,
|
||||
we don't use it anyway.
|
||||
|
||||
## 0.1.26
|
||||
|
||||
* Fix SourceNode.fromStringWithSourceMap for empty maps. Fixes github issue 70.
|
||||
|
||||
## 0.1.25
|
||||
|
||||
* Make compatible with browserify
|
||||
|
||||
## 0.1.24
|
||||
|
||||
* Fix issue with absolute paths and `file://` URIs. See
|
||||
https://bugzilla.mozilla.org/show_bug.cgi?id=885597
|
||||
|
||||
## 0.1.23
|
||||
|
||||
* Fix issue with absolute paths and sourcesContent, github issue 64.
|
||||
|
||||
## 0.1.22
|
||||
|
||||
* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21.
|
||||
|
||||
## 0.1.21
|
||||
|
||||
* Fixed handling of sources that start with a slash so that they are relative to
|
||||
the source root's host.
|
||||
|
||||
## 0.1.20
|
||||
|
||||
* Fixed github issue #43: absolute URLs aren't joined with the source root
|
||||
anymore.
|
||||
|
||||
## 0.1.19
|
||||
|
||||
* Using Travis CI to run tests.
|
||||
|
||||
## 0.1.18
|
||||
|
||||
* Fixed a bug in the handling of sourceRoot.
|
||||
|
||||
## 0.1.17
|
||||
|
||||
* Added SourceNode.fromStringWithSourceMap.
|
||||
|
||||
## 0.1.16
|
||||
|
||||
* Added missing documentation.
|
||||
|
||||
* Fixed the generating of empty mappings in SourceNode.
|
||||
|
||||
## 0.1.15
|
||||
|
||||
* Added SourceMapGenerator.applySourceMap.
|
||||
|
||||
## 0.1.14
|
||||
|
||||
* The sourceRoot is now handled consistently.
|
||||
|
||||
## 0.1.13
|
||||
|
||||
* Added SourceMapGenerator.fromSourceMap.
|
||||
|
||||
## 0.1.12
|
||||
|
||||
* SourceNode now generates empty mappings too.
|
||||
|
||||
## 0.1.11
|
||||
|
||||
* Added name support to SourceNode.
|
||||
|
||||
## 0.1.10
|
||||
|
||||
* Added sourcesContent support to the consumer and generator.
|
||||
@@ -0,0 +1,3 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = require('./async').angelfall;
|
||||
@@ -0,0 +1,120 @@
|
||||
"use strict";
|
||||
var firstLineError;
|
||||
try {throw new Error(); } catch (e) {firstLineError = e;}
|
||||
var schedule = require("./schedule");
|
||||
var Queue = require("./queue");
|
||||
|
||||
function Async() {
|
||||
this._customScheduler = false;
|
||||
this._isTickUsed = false;
|
||||
this._lateQueue = new Queue(16);
|
||||
this._normalQueue = new Queue(16);
|
||||
this._haveDrainedQueues = false;
|
||||
var self = this;
|
||||
this.drainQueues = function () {
|
||||
self._drainQueues();
|
||||
};
|
||||
this._schedule = schedule;
|
||||
}
|
||||
|
||||
Async.prototype.setScheduler = function(fn) {
|
||||
var prev = this._schedule;
|
||||
this._schedule = fn;
|
||||
this._customScheduler = true;
|
||||
return prev;
|
||||
};
|
||||
|
||||
Async.prototype.hasCustomScheduler = function() {
|
||||
return this._customScheduler;
|
||||
};
|
||||
|
||||
Async.prototype.haveItemsQueued = function () {
|
||||
return this._isTickUsed || this._haveDrainedQueues;
|
||||
};
|
||||
|
||||
|
||||
Async.prototype.fatalError = function(e, isNode) {
|
||||
if (isNode) {
|
||||
process.stderr.write("Fatal " + (e instanceof Error ? e.stack : e) +
|
||||
"\n");
|
||||
process.exit(2);
|
||||
} else {
|
||||
this.throwLater(e);
|
||||
}
|
||||
};
|
||||
|
||||
Async.prototype.throwLater = function(fn, arg) {
|
||||
if (arguments.length === 1) {
|
||||
arg = fn;
|
||||
fn = function () { throw arg; };
|
||||
}
|
||||
if (typeof setTimeout !== "undefined") {
|
||||
setTimeout(function() {
|
||||
fn(arg);
|
||||
}, 0);
|
||||
} else try {
|
||||
this._schedule(function() {
|
||||
fn(arg);
|
||||
});
|
||||
} catch (e) {
|
||||
throw new Error("No async scheduler available\u000a\u000a See http://goo.gl/MqrFmX\u000a");
|
||||
}
|
||||
};
|
||||
|
||||
function AsyncInvokeLater(fn, receiver, arg) {
|
||||
this._lateQueue.push(fn, receiver, arg);
|
||||
this._queueTick();
|
||||
}
|
||||
|
||||
function AsyncInvoke(fn, receiver, arg) {
|
||||
this._normalQueue.push(fn, receiver, arg);
|
||||
this._queueTick();
|
||||
}
|
||||
|
||||
function AsyncSettlePromises(promise) {
|
||||
this._normalQueue._pushOne(promise);
|
||||
this._queueTick();
|
||||
}
|
||||
|
||||
Async.prototype.invokeLater = AsyncInvokeLater;
|
||||
Async.prototype.invoke = AsyncInvoke;
|
||||
Async.prototype.settlePromises = AsyncSettlePromises;
|
||||
|
||||
|
||||
function _drainQueue(queue) {
|
||||
while (queue.length() > 0) {
|
||||
_drainQueueStep(queue);
|
||||
}
|
||||
}
|
||||
|
||||
function _drainQueueStep(queue) {
|
||||
var fn = queue.shift();
|
||||
if (typeof fn !== "function") {
|
||||
fn._settlePromises();
|
||||
} else {
|
||||
var receiver = queue.shift();
|
||||
var arg = queue.shift();
|
||||
fn.call(receiver, arg);
|
||||
}
|
||||
}
|
||||
|
||||
Async.prototype._drainQueues = function () {
|
||||
_drainQueue(this._normalQueue);
|
||||
this._reset();
|
||||
this._haveDrainedQueues = true;
|
||||
_drainQueue(this._lateQueue);
|
||||
};
|
||||
|
||||
Async.prototype._queueTick = function () {
|
||||
if (!this._isTickUsed) {
|
||||
this._isTickUsed = true;
|
||||
this._schedule(this.drainQueues);
|
||||
}
|
||||
};
|
||||
|
||||
Async.prototype._reset = function () {
|
||||
this._isTickUsed = false;
|
||||
};
|
||||
|
||||
module.exports = Async;
|
||||
module.exports.firstLineError = firstLineError;
|
||||
@@ -0,0 +1,15 @@
|
||||
var metaMap = require('./_metaMap'),
|
||||
noop = require('./noop');
|
||||
|
||||
/**
|
||||
* Gets metadata for `func`.
|
||||
*
|
||||
* @private
|
||||
* @param {Function} func The function to query.
|
||||
* @returns {*} Returns the metadata for `func`.
|
||||
*/
|
||||
var getData = !metaMap ? noop : function(func) {
|
||||
return metaMap.get(func);
|
||||
};
|
||||
|
||||
module.exports = getData;
|
||||
@@ -0,0 +1,15 @@
|
||||
'use strict';
|
||||
|
||||
var $StringValueOf = require('call-bind/callBound')('String.prototype.valueOf');
|
||||
|
||||
var Type = require('./Type');
|
||||
|
||||
// https://262.ecma-international.org/6.0/#sec-properties-of-the-string-prototype-object
|
||||
|
||||
module.exports = function thisStringValue(value) {
|
||||
if (Type(value) === 'String') {
|
||||
return value;
|
||||
}
|
||||
|
||||
return $StringValueOf(value);
|
||||
};
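Put differently: String primitives pass straight through, String wrapper objects are unwrapped by `String.prototype.valueOf`, and any other value makes that same call throw. A small sketch with an assumed require path:

```js
var thisStringValue = require('./thisStringValue'); // assumed path

thisStringValue('abc');         // 'abc' (primitive, returned as-is)
thisStringValue(Object('abc')); // 'abc' (String object, unwrapped)
// thisStringValue(42) throws a TypeError from String.prototype.valueOf
```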
|
||||
@@ -0,0 +1,3 @@
|
||||
export function createObject(keys: string[], values: any[]) {
|
||||
return keys.reduce((result, key, i) => ((result[key] = values[i]), result), {} as any);
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
import type { FormDataLike } from "../FormDataLike.js";
|
||||
/**
|
||||
* Check if given object is FormData
|
||||
*
|
||||
* @param value an object to test
|
||||
*/
|
||||
export declare const isFormData: (value: unknown) => value is FormDataLike;
|
||||
@@ -0,0 +1,39 @@
|
||||
import process from 'node:process';
|
||||
import restoreCursor from 'restore-cursor';
|
||||
|
||||
let isHidden = false;
|
||||
|
||||
const cliCursor = {};
|
||||
|
||||
cliCursor.show = (writableStream = process.stderr) => {
|
||||
if (!writableStream.isTTY) {
|
||||
return;
|
||||
}
|
||||
|
||||
isHidden = false;
|
||||
writableStream.write('\u001B[?25h');
|
||||
};
|
||||
|
||||
cliCursor.hide = (writableStream = process.stderr) => {
|
||||
if (!writableStream.isTTY) {
|
||||
return;
|
||||
}
|
||||
|
||||
restoreCursor();
|
||||
isHidden = true;
|
||||
writableStream.write('\u001B[?25l');
|
||||
};
|
||||
|
||||
cliCursor.toggle = (force, writableStream) => {
|
||||
if (force !== undefined) {
|
||||
isHidden = force;
|
||||
}
|
||||
|
||||
if (isHidden) {
|
||||
cliCursor.show(writableStream);
|
||||
} else {
|
||||
cliCursor.hide(writableStream);
|
||||
}
|
||||
};
|
||||
|
||||
export default cliCursor;
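Assuming this module is consumed as the `cli-cursor` package (the file is unnamed here, so the name is an assumption), typical usage is to hide the cursor while redrawing output and show it again afterwards:

```js
// Hedged usage sketch of the API defined above.
import cliCursor from 'cli-cursor';

cliCursor.hide();   // hide while rendering a spinner or progress bar
// ...redraw output...
cliCursor.show();   // restore the cursor when done
```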
|
||||
@@ -0,0 +1,23 @@
|
||||
import { operate } from '../util/lift';
|
||||
import { noop } from '../util/noop';
|
||||
import { createOperatorSubscriber } from './OperatorSubscriber';
|
||||
import { innerFrom } from '../observable/innerFrom';
|
||||
export function bufferWhen(closingSelector) {
|
||||
return operate(function (source, subscriber) {
|
||||
var buffer = null;
|
||||
var closingSubscriber = null;
|
||||
var openBuffer = function () {
|
||||
closingSubscriber === null || closingSubscriber === void 0 ? void 0 : closingSubscriber.unsubscribe();
|
||||
var b = buffer;
|
||||
buffer = [];
|
||||
b && subscriber.next(b);
|
||||
innerFrom(closingSelector()).subscribe((closingSubscriber = createOperatorSubscriber(subscriber, openBuffer, noop)));
|
||||
};
|
||||
openBuffer();
|
||||
source.subscribe(createOperatorSubscriber(subscriber, function (value) { return buffer === null || buffer === void 0 ? void 0 : buffer.push(value); }, function () {
|
||||
buffer && subscriber.next(buffer);
|
||||
subscriber.complete();
|
||||
}, undefined, function () { return (buffer = closingSubscriber = null); }));
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=bufferWhen.js.map
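This is the compiled ES5 build of the RxJS `bufferWhen` operator: it opens a buffer immediately, pushes each source value into it, and emits the buffer whenever the observable returned by `closingSelector` fires, then opens a fresh one. A short sketch against the public RxJS API (v7 assumed; timings are illustrative):

```js
import { interval } from 'rxjs';
import { bufferWhen } from 'rxjs/operators';

interval(250)                               // 0, 1, 2, ... every 250 ms
  .pipe(bufferWhen(() => interval(1000)))   // close and reopen the buffer every second
  .subscribe(batch => console.log(batch));  // e.g. [0, 1, 2], [3, 4, 5, 6], ...
```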
|
||||
@@ -0,0 +1 @@
|
||||
module.exports = 'B';
|
||||
@@ -0,0 +1,64 @@
|
||||
'use strict';
|
||||
|
||||
var GetIntrinsic = require('get-intrinsic');
|
||||
var callBound = require('call-bind/callBound');
|
||||
|
||||
var $TypeError = GetIntrinsic('%TypeError%');
|
||||
|
||||
var DeletePropertyOrThrow = require('./DeletePropertyOrThrow');
|
||||
var Get = require('./Get');
|
||||
var HasProperty = require('./HasProperty');
|
||||
var IsIntegralNumber = require('./IsIntegralNumber');
|
||||
var Set = require('./Set');
|
||||
var ToString = require('./ToString');
|
||||
var Type = require('./Type');
|
||||
|
||||
var isAbstractClosure = require('../helpers/isAbstractClosure');
|
||||
|
||||
var $push = callBound('Array.prototype.push');
|
||||
var $sort = callBound('Array.prototype.sort');
|
||||
|
||||
// https://262.ecma-international.org/13.0/#sec-sortindexedproperties
|
||||
|
||||
module.exports = function SortIndexedProperties(obj, len, SortCompare) {
|
||||
if (Type(obj) !== 'Object') {
|
||||
throw new $TypeError('Assertion failed: Type(obj) is not Object');
|
||||
}
|
||||
if (!IsIntegralNumber(len) || len < 0) {
|
||||
throw new $TypeError('Assertion failed: `len` must be an integer >= 0');
|
||||
}
|
||||
if (!isAbstractClosure(SortCompare) || SortCompare.length !== 2) {
|
||||
throw new $TypeError('Assertion failed: `SortCompare` must be an abstract closure taking 2 arguments');
|
||||
}
|
||||
|
||||
var items = []; // step 1
|
||||
|
||||
var k = 0; // step 2
|
||||
|
||||
while (k < len) { // step 3
|
||||
var Pk = ToString(k);
|
||||
var kPresent = HasProperty(obj, Pk);
|
||||
if (kPresent) {
|
||||
var kValue = Get(obj, Pk);
|
||||
$push(items, kValue);
|
||||
}
|
||||
k += 1;
|
||||
}
|
||||
|
||||
var itemCount = items.length; // step 4
|
||||
|
||||
$sort(items, SortCompare); // step 5
|
||||
|
||||
var j = 0; // step 6
|
||||
|
||||
while (j < itemCount) { // step 7
|
||||
Set(obj, ToString(j), items[j], true);
|
||||
j += 1;
|
||||
}
|
||||
|
||||
while (j < len) { // step 8
|
||||
DeletePropertyOrThrow(obj, ToString(j));
|
||||
j += 1;
|
||||
}
|
||||
return obj; // step 9
|
||||
};
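Because `SortCompare` only has to be a two-argument closure and `obj` only needs integer-keyed properties, the operation can be exercised on a plain array-like. A hedged sketch; the require path is an assumption, since it depends on where this file sits inside es-abstract:

```js
var SortIndexedProperties = require('./SortIndexedProperties'); // assumed relative path

var obj = { 0: 3, 1: 1, 2: 2, length: 3 };
SortIndexedProperties(obj, 3, function compare(a, b) {
  return a < b ? -1 : a > b ? 1 : 0; // numeric ascending
});
// obj is now { 0: 1, 1: 2, 2: 3, length: 3 }
```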
|
||||
@@ -0,0 +1,51 @@
|
||||
/**
|
||||
Convert a tuple/array into a union type of its elements.
|
||||
|
||||
This can be useful when you have a fixed set of allowed values and want a type defining only the allowed values, but do not want to repeat yourself.
|
||||
|
||||
@example
|
||||
```
|
||||
import type {TupleToUnion} from 'type-fest';
|
||||
|
||||
const destinations = ['a', 'b', 'c'] as const;
|
||||
|
||||
type Destination = TupleToUnion<typeof destinations>;
|
||||
//=> 'a' | 'b' | 'c'
|
||||
|
||||
function verifyDestination(destination: unknown): destination is Destination {
|
||||
return destinations.includes(destination as any);
|
||||
}
|
||||
|
||||
type RequestBody = {
|
||||
deliverTo: Destination;
|
||||
};
|
||||
|
||||
function verifyRequestBody(body: unknown): body is RequestBody {
|
||||
const deliverTo = (body as any).deliverTo;
|
||||
return typeof body === 'object' && body !== null && verifyDestination(deliverTo);
|
||||
}
|
||||
```
|
||||
|
||||
Alternatively, you may use `typeof destinations[number]`. If `destinations` is a tuple, there is no difference. However, if `destinations` is a string, the resulting type will be the union of the characters in the string. Other types of `destinations` may result in a compile error. In comparison, TupleToUnion will return `never` if a tuple is not provided.
|
||||
|
||||
@example
|
||||
```
|
||||
const destinations = ['a', 'b', 'c'] as const;
|
||||
|
||||
type Destination = typeof destinations[number];
|
||||
//=> 'a' | 'b' | 'c'
|
||||
|
||||
const erroringType = new Set(['a', 'b', 'c']);
|
||||
|
||||
type ErroringType = typeof erroringType[number];
|
||||
//=> Type 'Set<string>' has no matching index signature for type 'number'. ts(2537)
|
||||
|
||||
const numberBool: { [n: number]: boolean } = { 1: true };
|
||||
|
||||
type NumberBool = typeof numberBool[number];
|
||||
//=> boolean
|
||||
```
|
||||
|
||||
@category Array
|
||||
*/
|
||||
export type TupleToUnion<ArrayType> = ArrayType extends readonly unknown[] ? ArrayType[number] : never;
|
||||
@@ -0,0 +1,4 @@
|
||||
export type ScanStationIdsNotMatchingError = {
|
||||
name: string;
|
||||
message: string;
|
||||
};
|
||||
@@ -0,0 +1,61 @@
|
||||
/**
|
||||
* This class is mostly based on Flux's Dispatcher by Facebook
|
||||
* https://github.com/facebook/flux/blob/master/src/Dispatcher.js
|
||||
*/
|
||||
export default class Dispatcher<TPayload> {
|
||||
_callbacks: {
|
||||
[key: string]: (payload: TPayload) => void;
|
||||
};
|
||||
_isDispatching: boolean;
|
||||
_isHandled: {
|
||||
[key: string]: boolean;
|
||||
};
|
||||
_isPending: {
|
||||
[key: string]: boolean;
|
||||
};
|
||||
_lastID: number;
|
||||
_pendingPayload: TPayload;
|
||||
constructor();
|
||||
/**
|
||||
* Registers a callback to be invoked with every dispatched payload. Returns
|
||||
* a token that can be used with `waitFor()`.
|
||||
*/
|
||||
register(callback: (payload: TPayload) => void): string;
|
||||
/**
|
||||
* Removes a callback based on its token.
|
||||
*/
|
||||
unregister(id: string): void;
|
||||
/**
|
||||
* Waits for the callbacks specified to be invoked before continuing execution
|
||||
* of the current callback. This method should only be used by a callback in
|
||||
* response to a dispatched payload.
|
||||
*/
|
||||
waitFor(ids: Array<string>): void;
|
||||
/**
|
||||
* Dispatches a payload to all registered callbacks.
|
||||
*/
|
||||
dispatch(payload: TPayload): void;
|
||||
/**
|
||||
* Is this Dispatcher currently dispatching.
|
||||
*/
|
||||
isDispatching(): boolean;
|
||||
/**
|
||||
* Call the callback stored with the given id. Also do some internal
|
||||
* bookkeeping.
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
private _invokeCallback;
|
||||
/**
|
||||
* Set up bookkeeping needed when dispatching.
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
private _startDispatching;
|
||||
/**
|
||||
* Clear bookkeeping used for dispatching.
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
private _stopDispatching;
|
||||
}
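A brief usage sketch of the API declared above; importing it from the `flux` package is an assumption based on the comment's reference to Facebook's Dispatcher:

```js
const { Dispatcher } = require('flux'); // package name assumed

const dispatcher = new Dispatcher();

const storeToken = dispatcher.register((payload) => {
  console.log('store saw', payload);
});

dispatcher.register((payload) => {
  dispatcher.waitFor([storeToken]); // run only after the store's callback has finished
  console.log('logged', payload);
});

dispatcher.dispatch({ type: 'city-update', city: 'Oslo' });
```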
|
||||
@@ -0,0 +1,21 @@
|
||||
import { Observable } from '../Observable';
|
||||
import { ObservableInput, ObservedValueOf, ReadableStreamLike } from '../types';
|
||||
export declare function innerFrom<O extends ObservableInput<any>>(input: O): Observable<ObservedValueOf<O>>;
|
||||
/**
|
||||
* Creates an RxJS Observable from an object that implements `Symbol.observable`.
|
||||
* @param obj An object that properly implements `Symbol.observable`.
|
||||
*/
|
||||
export declare function fromInteropObservable<T>(obj: any): Observable<T>;
|
||||
/**
|
||||
* Synchronously emits the values of an array like and completes.
|
||||
* This is exported because there are creation functions and operators that need to
|
||||
* make direct use of the same logic, and there's no reason to make them run through
|
||||
* `from` conditionals because we *know* they're dealing with an array.
|
||||
* @param array The array to emit values from
|
||||
*/
|
||||
export declare function fromArrayLike<T>(array: ArrayLike<T>): Observable<T>;
|
||||
export declare function fromPromise<T>(promise: PromiseLike<T>): Observable<T>;
|
||||
export declare function fromIterable<T>(iterable: Iterable<T>): Observable<T>;
|
||||
export declare function fromAsyncIterable<T>(asyncIterable: AsyncIterable<T>): Observable<T>;
|
||||
export declare function fromReadableStreamLike<T>(readableStream: ReadableStreamLike<T>): Observable<T>;
|
||||
//# sourceMappingURL=innerFrom.d.ts.map
|
||||
@@ -0,0 +1,2 @@
|
||||
if(typeof cptable === 'undefined') cptable = {};
|
||||
cptable[28598] = (function(){ var d = "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~
<C29F>¢£¤¥¦§¨©×«¬®¯°±²³´µ¶·¸¹÷»¼½¾<C2BD><C2BE><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>‗אבגדהוזחטיךכלםמןנסעףפץצקרשת<D7A9><D7AA><E2808E>", D = [], e = {}; for(var i=0;i!=d.length;++i) { if(d.charCodeAt(i) !== 0xFFFD) e[d.charAt(i)] = i; D[i] = d.charAt(i); } return {"enc": e, "dec": D }; })();
|
||||
@@ -0,0 +1,180 @@
|
||||
function csvtojson() {
|
||||
var Converter = require("../v2").Converter;
|
||||
var fs = require("fs");
|
||||
var options = require("./options.json");
|
||||
var cmds = options.commands;
|
||||
var opts = options.options;
|
||||
var exps = options.examples;
|
||||
var pkg = require("../package.json");
|
||||
var os = require("os");
|
||||
/**
|
||||
*{
|
||||
"cmd": "parse", command to run
|
||||
"options": {}, options to pass to the command
|
||||
"inputStream": process.stdin // input stream for the command. default is stdin. can be a file read stream.
|
||||
};
|
||||
*
|
||||
*/
|
||||
var parsedCmd;
|
||||
|
||||
function _showHelp(errno) {
|
||||
var key;
|
||||
errno = typeof errno === "number" ? errno : 0;
|
||||
console.log("csvtojson: Convert csv to JSON format");
|
||||
console.log("version:", pkg.version);
|
||||
console.log("Usage: csvtojson [<command>] [<options>] filepath\n");
|
||||
console.log("Commands: ");
|
||||
for (key in cmds) {
|
||||
if (cmds.hasOwnProperty(key)) {
|
||||
console.log("\t%s: %s", key, cmds[key]);
|
||||
}
|
||||
}
|
||||
console.log("Options: ");
|
||||
for (key in opts) {
|
||||
if (opts.hasOwnProperty(key)) {
|
||||
console.log("\t%s: %s", key, opts[key].desc);
|
||||
}
|
||||
}
|
||||
console.log("Examples: ");
|
||||
for (var i = 0; i < exps.length; i++) {
|
||||
console.log("\t%s", exps[i]);
|
||||
}
|
||||
process.exit(errno);
|
||||
}
|
||||
function stringToRegExp(str) {
|
||||
var lastSlash = str.lastIndexOf("/");
|
||||
var source = str.substring(1, lastSlash);
|
||||
var flag = str.substring(lastSlash + 1);
|
||||
return new RegExp(source,flag);
|
||||
}
|
||||
function parse() {
|
||||
var is = parsedCmd.inputStream;
|
||||
if (parsedCmd.options.maxRowLength === undefined) {
|
||||
parsedCmd.options.maxRowLength = 10240;
|
||||
}
|
||||
if (is === process.stdin && is.isTTY) {
|
||||
console.log("Please specify csv file path or pipe the csv data through.\n");
|
||||
_showHelp(1);
|
||||
}
|
||||
if (parsedCmd.options.delimiter === "\\t") {
|
||||
parsedCmd.options.delimiter = "\t";
|
||||
}
|
||||
if (parsedCmd.options.ignoreColumns) {
|
||||
parsedCmd.options.ignoreColumns=stringToRegExp(parsedCmd.options.ignoreColumns);
|
||||
|
||||
}
|
||||
if (parsedCmd.options.includeColumns) {
|
||||
parsedCmd.options.includeColumns=stringToRegExp(parsedCmd.options.includeColumns);
|
||||
|
||||
}
|
||||
var conv = new Converter(parsedCmd.options);
|
||||
var isFirst = true;
|
||||
conv.on("error", function (err, pos) {
|
||||
if (!parsedCmd.options.quiet) {
|
||||
console.error("csvtojson got an error: ", err);
|
||||
if (pos) {
|
||||
console.error("The error happens at following line: ");
|
||||
console.log(pos);
|
||||
}
|
||||
}
|
||||
process.exit(1);
|
||||
})
|
||||
.on("data",function (dataStr) {
|
||||
process.stdout.write((isFirst ? "" : "," + os.EOL) + dataStr.toString().substr(0,dataStr.length-1));
|
||||
isFirst = false;
|
||||
})
|
||||
.on("done", function () {
|
||||
console.log(os.EOL + "]");
|
||||
})
|
||||
console.log("[");
|
||||
is.pipe(conv);
|
||||
// is.pipe(conv);
|
||||
}
|
||||
|
||||
function run(cmd, options) {
|
||||
if (cmd === "parse") {
|
||||
parse();
|
||||
} else if (cmd === "version") {
|
||||
console.log(pkg.version);
|
||||
} else {
|
||||
console.log("unknown command %s.", cmd);
|
||||
_showHelp(1);
|
||||
}
|
||||
}
|
||||
|
||||
function commandParser() {
|
||||
var parsedCmd = {
|
||||
"cmd": "parse",
|
||||
"options": {},
|
||||
"inputStream": process.stdin
|
||||
};
|
||||
|
||||
function parseObject(val, optional) {
|
||||
try {
|
||||
return JSON.parse(val);
|
||||
} catch (e) {
|
||||
if (optional) {
|
||||
return val;
|
||||
} else {
|
||||
console.error(e);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function parseBool(str, optName) {
|
||||
str = str.toLowerCase();
|
||||
if (str === "true" || str === "y") {
|
||||
return true;
|
||||
} else if (str === "false" || str === "n") {
|
||||
return false;
|
||||
}
|
||||
console.log("Unknown boolean value %s for parameter %s.", str, optName);
|
||||
_showHelp(1);
|
||||
}
|
||||
process.argv.slice(2).forEach(function (item) {
|
||||
if (item.indexOf("--") > -1) {
|
||||
var itemArr = item.split("=");
|
||||
var optName = itemArr[0];
|
||||
var key, val, type;
|
||||
if (!opts[optName]) {
|
||||
console.log("Option %s not supported.", optName);
|
||||
_showHelp(1);
|
||||
}
|
||||
key = optName.replace('--', '');
|
||||
val = itemArr[1] || '';
|
||||
type = opts[optName].type;
|
||||
if (type === "string") {
|
||||
parsedCmd.options[key] = val.toString();
|
||||
} else if (type === "boolean") {
|
||||
parsedCmd.options[key] = parseBool(val, optName);
|
||||
} else if (type === "number") {
|
||||
parsedCmd.options[key] = parseFloat(val);
|
||||
} else if (type === "object") {
|
||||
parsedCmd.options[key] = parseObject(val, false);
|
||||
} else if (type === "~object") {
|
||||
parsedCmd.options[key] = parseObject(val, true);
|
||||
} else {
|
||||
throw ({
|
||||
name: "UnimplementedException",
|
||||
message: "Option type parsing not implemented. See bin/options.json"
|
||||
});
|
||||
}
|
||||
} else if (cmds[item]) {
|
||||
parsedCmd.cmd = item;
|
||||
} else if (fs.existsSync(item)) {
|
||||
parsedCmd.inputStream = fs.createReadStream(item);
|
||||
} else {
|
||||
console.log("unknown parameter %s.", item);
|
||||
}
|
||||
});
|
||||
return parsedCmd;
|
||||
}
|
||||
process.stdin.setEncoding('utf8');
|
||||
parsedCmd = commandParser();
|
||||
run(parsedCmd.cmd, parsedCmd.options);
|
||||
}
|
||||
module.exports = csvtojson;
|
||||
if (!module.parent) {
|
||||
csvtojson();
|
||||
}
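The CLI above is a thin wrapper around the package's `Converter` stream: it turns `--key=value` flags into an options object, pipes stdin or a file stream into a `Converter`, and prints the rows as one JSON array. A hedged sketch of doing the same programmatically; the `csvtojson/v2` specifier mirrors the `require("../v2")` call above and is an assumption:

```js
var Converter = require("csvtojson/v2").Converter; // assumed public path
var fs = require("fs");

var conv = new Converter({ delimiter: "," });
conv.on("error", function (err) { console.error(err); process.exit(1); });
conv.on("data", function (row) { process.stdout.write(row); }); // one JSON row per event
conv.on("done", function () { console.log("finished"); });

fs.createReadStream("input.csv").pipe(conv);
```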
|
||||
@@ -0,0 +1,84 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.TrackService = void 0;
|
||||
const request_1 = require("../core/request");
|
||||
class TrackService {
|
||||
/**
|
||||
* Get all
|
||||
* Lists all tracks.
|
||||
* @result ResponseTrack
|
||||
* @throws ApiError
|
||||
*/
|
||||
static async trackControllerGetAll() {
|
||||
const result = await (0, request_1.request)({
|
||||
method: 'GET',
|
||||
path: `/api/tracks`,
|
||||
});
|
||||
return result.body;
|
||||
}
|
||||
/**
|
||||
* Post
|
||||
* Create a new track. <br> Please remember that the track's distance must be greater than 0.
|
||||
* @param requestBody CreateTrack
|
||||
* @result ResponseTrack
|
||||
* @throws ApiError
|
||||
*/
|
||||
static async trackControllerPost(requestBody) {
|
||||
const result = await (0, request_1.request)({
|
||||
method: 'POST',
|
||||
path: `/api/tracks`,
|
||||
body: requestBody,
|
||||
});
|
||||
return result.body;
|
||||
}
|
||||
/**
|
||||
* Get one
|
||||
* Lists all information about the track whose id got provided.
|
||||
* @param id
|
||||
* @result ResponseTrack
|
||||
* @throws ApiError
|
||||
*/
|
||||
static async trackControllerGetOne(id) {
|
||||
const result = await (0, request_1.request)({
|
||||
method: 'GET',
|
||||
path: `/api/tracks/${id}`,
|
||||
});
|
||||
return result.body;
|
||||
}
|
||||
/**
|
||||
* Put
|
||||
* Update the track whose id you provided. <br> Please remember that ids can't be changed.
|
||||
* @param id
|
||||
* @param requestBody UpdateTrack
|
||||
* @result ResponseTrack
|
||||
* @throws ApiError
|
||||
*/
|
||||
static async trackControllerPut(id, requestBody) {
|
||||
const result = await (0, request_1.request)({
|
||||
method: 'PUT',
|
||||
path: `/api/tracks/${id}`,
|
||||
body: requestBody,
|
||||
});
|
||||
return result.body;
|
||||
}
|
||||
/**
|
||||
* Remove
|
||||
* Delete the track whose id you provided. <br> If no track with this id exists, it will just return 204 (no content).
|
||||
* @param id
|
||||
* @param force
|
||||
* @result ResponseTrack
|
||||
* @result ResponseEmpty
|
||||
* @throws ApiError
|
||||
*/
|
||||
static async trackControllerRemove(id, force) {
|
||||
const result = await (0, request_1.request)({
|
||||
method: 'DELETE',
|
||||
path: `/api/tracks/${id}`,
|
||||
query: {
|
||||
'force': force,
|
||||
},
|
||||
});
|
||||
return result.body;
|
||||
}
|
||||
}
|
||||
exports.TrackService = TrackService;
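Every method wraps one REST call and resolves with the response body, so consuming the generated client is a matter of awaiting the static methods. A hedged sketch; the import path is illustrative and depends on where this generated file is emitted:

```js
const { TrackService } = require('./services/TrackService'); // assumed path

async function listTracks() {
  const tracks = await TrackService.trackControllerGetAll(); // ResponseTrack list
  console.log(tracks);
}

listTracks().catch(console.error);
```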
|
||||
@@ -0,0 +1,5 @@
|
||||
import { OperatorFunction } from '../types';
|
||||
export declare function reduce<V, A = V>(accumulator: (acc: A | V, value: V, index: number) => A): OperatorFunction<V, V | A>;
|
||||
export declare function reduce<V, A>(accumulator: (acc: A, value: V, index: number) => A, seed: A): OperatorFunction<V, A>;
|
||||
export declare function reduce<V, A, S = A>(accumulator: (acc: A | S, value: V, index: number) => A, seed: S): OperatorFunction<V, A>;
|
||||
//# sourceMappingURL=reduce.d.ts.map
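The three overloads mirror `Array.prototype.reduce`: without a seed the first source value becomes the initial accumulator, and with a seed the accumulator type may differ from the element type. A short sketch against the public RxJS API (v7 assumed):

```js
import { of } from 'rxjs';
import { reduce } from 'rxjs/operators';

of(1, 2, 3, 4)
  .pipe(reduce((acc, value) => acc + value, 0)) // seeded: emits 10 on completion
  .subscribe(console.log);
```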