source upload
This commit is contained in:
1804
contrib/mORMot/SyNode/core_modules/DevTools/Debugger.js
Normal file
1804
contrib/mORMot/SyNode/core_modules/DevTools/Debugger.js
Normal file
File diff suppressed because it is too large
Load Diff
133
contrib/mORMot/SyNode/core_modules/DevTools/DevToolsUtils.js
Normal file
133
contrib/mORMot/SyNode/core_modules/DevTools/DevToolsUtils.js
Normal file
@@ -0,0 +1,133 @@
|
||||
"use strict";
|
||||
let {logError} = process.binding('debugger');
|
||||
|
||||
/**
|
||||
* Turn the error |aError| into a string, without fail.
|
||||
*/
|
||||
function safeErrorString(aError) {
|
||||
try {
|
||||
let errorString = aError.toString();
|
||||
if (typeof errorString == "string") {
|
||||
// Attempt to attach a stack to |errorString|. If it throws an error, or
|
||||
// isn't a string, don't use it.
|
||||
try {
|
||||
if (aError.stack) {
|
||||
let stack = aError.stack.toString();
|
||||
if (typeof stack == "string") {
|
||||
errorString += "\nStack: " + stack;
|
||||
}
|
||||
}
|
||||
} catch (ee) { }
|
||||
|
||||
// Append additional line and column number information to the output,
|
||||
// since it might not be part of the stringified error.
|
||||
if (typeof aError.lineNumber == "number" && typeof aError.columnNumber == "number") {
|
||||
errorString += "Line: " + aError.lineNumber + ", column: " + aError.columnNumber;
|
||||
}
|
||||
|
||||
return errorString;
|
||||
}
|
||||
} catch (ee) { }
|
||||
|
||||
// We failed to find a good error description, so do the next best thing.
|
||||
return Object.prototype.toString.call(aError);
|
||||
};
|
||||
|
||||
/**
|
||||
* Report that |aWho| threw an exception, |aException|.
|
||||
*/
|
||||
export function reportException(aWho, aException) {
|
||||
let msg = aWho + " threw an exception: " + safeErrorString(aException);
|
||||
logError(msg);
|
||||
};
|
||||
|
||||
/**
|
||||
* Safely get the property value from a Debugger.Object for a given key. Walks
|
||||
* the prototype chain until the property is found.
|
||||
*
|
||||
* @param Debugger.Object aObject
|
||||
* The Debugger.Object to get the value from.
|
||||
* @param String aKey
|
||||
* The key to look for.
|
||||
* @return Any
|
||||
*/
|
||||
export function getProperty(aObj, aKey) {
|
||||
let root = aObj;
|
||||
try {
|
||||
do {
|
||||
const desc = aObj.getOwnPropertyDescriptor(aKey);
|
||||
if (desc) {
|
||||
if ("value" in desc) {
|
||||
return desc.value;
|
||||
}
|
||||
// Call the getter if it's safe.
|
||||
return hasSafeGetter(desc) ? desc.get.call(root).return : undefined;
|
||||
}
|
||||
aObj = aObj.proto;
|
||||
} while (aObj);
|
||||
} catch (e) {
|
||||
// If anything goes wrong report the error and return undefined.
|
||||
//exports.reportException("getProperty", e);
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
/**
|
||||
* Determines if a descriptor has a getter which doesn't call into JavaScript.
|
||||
*
|
||||
* @param Object aDesc
|
||||
* The descriptor to check for a safe getter.
|
||||
* @return Boolean
|
||||
* Whether a safe getter was found.
|
||||
*/
|
||||
export function hasSafeGetter(aDesc) {
|
||||
// Scripted functions that are CCWs will not appear scripted until after
|
||||
// unwrapping.
|
||||
try {
|
||||
let fn = aDesc.get.unwrap();
|
||||
return fn && fn.callable && fn.class == "Function" && fn.script === undefined;
|
||||
} catch(e) {
|
||||
// Avoid exception 'Object in compartment marked as invisible to Debugger'
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
// Calls the property with the given `name` on the given `object`, where
|
||||
// `name` is a string, and `object` a Debugger.Object instance.
|
||||
///
|
||||
// This function uses only the Debugger.Object API to call the property. It
|
||||
// avoids the use of unsafeDeference. This is useful for example in workers,
|
||||
// where unsafeDereference will return an opaque security wrapper to the
|
||||
// referent.
|
||||
export function callPropertyOnObject(object, name) {
|
||||
// Find the property.
|
||||
let descriptor;
|
||||
let proto = object;
|
||||
do {
|
||||
descriptor = proto.getOwnPropertyDescriptor(name);
|
||||
if (descriptor !== undefined) {
|
||||
break;
|
||||
}
|
||||
proto = proto.proto;
|
||||
} while (proto !== null);
|
||||
if (descriptor === undefined) {
|
||||
throw new Error("No such property");
|
||||
}
|
||||
let value = descriptor.value;
|
||||
if (typeof value !== "object" || value === null || !("callable" in value)) {
|
||||
throw new Error("Not a callable object.");
|
||||
}
|
||||
|
||||
// Call the property.
|
||||
let result = value.call(object);
|
||||
if (result === null) {
|
||||
throw new Error("Code was terminated.");
|
||||
}
|
||||
if ("throw" in result) {
|
||||
throw result.throw;
|
||||
}
|
||||
return result.return;
|
||||
}
|
||||
|
||||
|
||||
//exports.callPropertyOnObject = callPropertyOnObject;
|
@@ -0,0 +1,826 @@
|
||||
import * as DevToolsUtils from 'DevTools/DevToolsUtils.js';
|
||||
let {global} = process.binding('debugger');
|
||||
|
||||
const TYPED_ARRAY_CLASSES = ["Uint8Array", "Uint8ClampedArray", "Uint16Array",
|
||||
"Uint32Array", "Int8Array", "Int16Array", "Int32Array", "Float32Array",
|
||||
"Float64Array"];
|
||||
|
||||
|
||||
// Number of items to preview in objects, arrays, maps, sets, lists,
|
||||
// collections, etc.
|
||||
const OBJECT_PREVIEW_MAX_ITEMS = 10;
|
||||
|
||||
let _ObjectActorPreviewers = {
|
||||
String: [function (objectActor, grip) {
|
||||
return wrappedPrimitivePreviewer("String", String, objectActor, grip);
|
||||
}],
|
||||
|
||||
Boolean: [function (objectActor, grip) {
|
||||
return wrappedPrimitivePreviewer("Boolean", Boolean, objectActor, grip);
|
||||
}],
|
||||
|
||||
Number: [function (objectActor, grip) {
|
||||
return wrappedPrimitivePreviewer("Number", Number, objectActor, grip);
|
||||
}],
|
||||
|
||||
Function: [function (objectActor, grip) {
|
||||
let {_obj} = objectActor;
|
||||
if (_obj.name) {
|
||||
grip.name = _obj.name;
|
||||
}
|
||||
|
||||
if (_obj.displayName) {
|
||||
grip.displayName = _obj.displayName.substr(0, 500);
|
||||
}
|
||||
|
||||
if (_obj.parameterNames) {
|
||||
grip.parameterNames = _obj.parameterNames;
|
||||
}
|
||||
|
||||
// Check if the developer has added a de-facto standard displayName
|
||||
// property for us to use.
|
||||
let userDisplayName;
|
||||
try {
|
||||
userDisplayName = _obj.getOwnPropertyDescriptor("displayName");
|
||||
} catch (e) {
|
||||
// Calling getOwnPropertyDescriptor with displayName might throw
|
||||
// with "permission denied" errors for some functions.
|
||||
//dumpn(e);
|
||||
}
|
||||
|
||||
if (userDisplayName && typeof userDisplayName.value == "string" &&
|
||||
userDisplayName.value) {
|
||||
grip.userDisplayName = objectActor.getGrip(userDisplayName.value);
|
||||
}
|
||||
|
||||
//let dbgGlobal = hooks.getGlobalDebugObject();
|
||||
//if (dbgGlobal) {
|
||||
//let script = dbgGlobal.makeDebuggeeValue(_obj.unsafeDereference()).script;
|
||||
let script = _obj.script;
|
||||
if (script) {
|
||||
grip.location = {
|
||||
url: script.url,
|
||||
line: script.startLine
|
||||
};
|
||||
}
|
||||
//}
|
||||
|
||||
return true;
|
||||
}],
|
||||
|
||||
RegExp: [function (objectActor, grip) {
|
||||
let {_obj} = objectActor;
|
||||
// Avoid having any special preview for the RegExp.prototype itself.
|
||||
if (!_obj.proto || _obj.proto.class != "RegExp") {
|
||||
return false;
|
||||
}
|
||||
|
||||
let str = RegExp.prototype.toString.call(_obj.unsafeDereference());
|
||||
grip.displayString = objectActor.getGrip(str);
|
||||
return true;
|
||||
}],
|
||||
|
||||
Date: [function (objectActor, grip) {
|
||||
let {_obj} = objectActor;
|
||||
let time = Date.prototype.getTime.call(_obj.unsafeDereference());
|
||||
|
||||
grip.preview = {
|
||||
timestamp: objectActor.getGrip(time)
|
||||
};
|
||||
return true;
|
||||
}],
|
||||
|
||||
Array: [function (objectActor, grip) {
|
||||
let {_obj} = objectActor;
|
||||
let length = DevToolsUtils.getProperty(_obj, "length");
|
||||
if (typeof length != "number") {
|
||||
return false;
|
||||
}
|
||||
|
||||
grip.preview = {
|
||||
kind: "ArrayLike",
|
||||
length: length
|
||||
};
|
||||
|
||||
if (objectActor.getGripDepth() > 1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let raw = _obj.unsafeDereference();
|
||||
let items = grip.preview.items = [];
|
||||
|
||||
for (let i = 0; i < length; ++i) {
|
||||
// Array Xrays filter out various possibly-unsafe properties (like
|
||||
// functions, and claim that the value is undefined instead. This
|
||||
// is generally the right thing for privileged code accessing untrusted
|
||||
// objects, but quite confusing for Object previews. So we manually
|
||||
// override this protection by waiving Xrays on the array, and re-applying
|
||||
// Xrays on any indexed value props that we pull off of it.
|
||||
//let desc = Object.getOwnPropertyDescriptor(Cu.waiveXrays(raw), i);
|
||||
let desc = Object.getOwnPropertyDescriptor(raw, i);
|
||||
if (desc && !desc.get && !desc.set) {
|
||||
//let value = Cu.unwaiveXrays(desc.value);
|
||||
let value = desc.value;
|
||||
value = makeDebuggeeValueIfNeeded(_obj, value);
|
||||
items.push(objectActor.getGrip(value));
|
||||
} else {
|
||||
items.push(null);
|
||||
}
|
||||
|
||||
if (items.length == OBJECT_PREVIEW_MAX_ITEMS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}],
|
||||
|
||||
Set: [function (objectActor, grip) {
|
||||
let {_obj} = objectActor;
|
||||
let size = DevToolsUtils.getProperty(_obj, "size");
|
||||
if (typeof size != "number") {
|
||||
return false;
|
||||
}
|
||||
|
||||
grip.preview = {
|
||||
kind: "ArrayLike",
|
||||
length: size
|
||||
};
|
||||
|
||||
// Avoid recursive object grips.
|
||||
if (objectActor.getGripDepth() > 1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let raw = _obj.unsafeDereference();
|
||||
let items = grip.preview.items = [];
|
||||
// We currently lack XrayWrappers for Set, so when we iterate over
|
||||
// the values, the temporary iterator objects get created in the target
|
||||
// compartment. However, we _do_ have Xrays to Object now, so we end up
|
||||
// Xraying those temporary objects, and filtering access to |it.value|
|
||||
// based on whether or not it's Xrayable and/or callable, which breaks
|
||||
// the for/of iteration.
|
||||
//
|
||||
// This code is designed to handle untrusted objects, so we can safely
|
||||
// waive Xrays on the iterable, and relying on the Debugger machinery to
|
||||
// make sure we handle the resulting objects carefully.
|
||||
//for (let item of Cu.waiveXrays(Set.prototype.values.call(raw))) {
|
||||
for (let item of Set.prototype.values.call(raw)) {
|
||||
//item = Cu.unwaiveXrays(item);
|
||||
item = makeDebuggeeValueIfNeeded(_obj, item);
|
||||
items.push(objectActor.getGrip(item));
|
||||
if (items.length == OBJECT_PREVIEW_MAX_ITEMS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}],
|
||||
/*WeakSet: [function(objectActor, grip) {
|
||||
let {_obj} = objectActor;
|
||||
let raw = _obj.unsafeDereference();
|
||||
|
||||
// We currently lack XrayWrappers for WeakSet, so when we iterate over
|
||||
// the values, the temporary iterator objects get created in the target
|
||||
// compartment. However, we _do_ have Xrays to Object now, so we end up
|
||||
// Xraying those temporary objects, and filtering access to |it.value|
|
||||
// based on whether or not it's Xrayable and/or callable, which breaks
|
||||
// the for/of iteration.
|
||||
//
|
||||
// This code is designed to handle untrusted objects, so we can safely
|
||||
// waive Xrays on the iterable, and relying on the Debugger machinery to
|
||||
// make sure we handle the resulting objects carefully.
|
||||
//let keys = Cu.waiveXrays(ThreadSafeChromeUtils.nondeterministicGetWeakSetKeys(raw));
|
||||
let keys = Cu.waiveXrays(ThreadSafeChromeUtils.nondeterministicGetWeakSetKeys(raw));
|
||||
grip.preview = {
|
||||
kind: "ArrayLike",
|
||||
length: keys.length
|
||||
};
|
||||
|
||||
//// Avoid recursive object grips.
|
||||
//if (hooks.getGripDepth() > 1) {
|
||||
//return true;
|
||||
//}
|
||||
|
||||
let items = grip.preview.items = [];
|
||||
for (let item of keys) {
|
||||
//item = Cu.unwaiveXrays(item);
|
||||
item = makeDebuggeeValueIfNeeded(obj, item);
|
||||
items.push(hooks.createValueGrip(item));
|
||||
if (items.length == OBJECT_PREVIEW_MAX_ITEMS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}],*/
|
||||
|
||||
Map: [function (objectActor, grip) {
|
||||
let {_obj} = objectActor;
|
||||
let size = DevToolsUtils.getProperty(_obj, "size");
|
||||
if (typeof size != "number") {
|
||||
return false;
|
||||
}
|
||||
|
||||
grip.preview = {
|
||||
kind: "MapLike",
|
||||
size: size
|
||||
};
|
||||
|
||||
if (objectActor.getGripDepth() > 1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let raw = _obj.unsafeDereference();
|
||||
let entries = grip.preview.entries = [];
|
||||
// Iterating over a Map via .entries goes through various intermediate
|
||||
// objects - an Iterator object, then a 2-element Array object, then the
|
||||
// actual values we care about. We don't have Xrays to Iterator objects,
|
||||
// so we get Opaque wrappers for them. And even though we have Xrays to
|
||||
// Arrays, the semantics often deny access to the entires based on the
|
||||
// nature of the values. So we need waive Xrays for the iterator object
|
||||
// and the tupes, and then re-apply them on the underlying values until
|
||||
// we fix bug 1023984.
|
||||
//
|
||||
// Even then though, we might want to continue waiving Xrays here for the
|
||||
// same reason we do so for Arrays above - this filtering behavior is likely
|
||||
// to be more confusing than beneficial in the case of Object previews.
|
||||
//for (let keyValuePair of Cu.waiveXrays(Map.prototype.entries.call(raw))) {
|
||||
for (let keyValuePair of Map.prototype.entries.call(raw)) {
|
||||
//let key = Cu.unwaiveXrays(keyValuePair[0]);
|
||||
let key = keyValuePair[0];
|
||||
//let value = Cu.unwaiveXrays(keyValuePair[1]);
|
||||
let value = keyValuePair[1];
|
||||
key = makeDebuggeeValueIfNeeded(_obj, key);
|
||||
value = makeDebuggeeValueIfNeeded(_obj, value);
|
||||
entries.push([objectActor.getGrip(key),
|
||||
objectActor.getGrip(value)]);
|
||||
if (entries.length == OBJECT_PREVIEW_MAX_ITEMS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}]/*,
|
||||
|
||||
WeakMap: [function({obj, hooks}, grip) {
|
||||
let raw = obj.unsafeDereference();
|
||||
// We currently lack XrayWrappers for WeakMap, so when we iterate over
|
||||
// the values, the temporary iterator objects get created in the target
|
||||
// compartment. However, we _do_ have Xrays to Object now, so we end up
|
||||
// Xraying those temporary objects, and filtering access to |it.value|
|
||||
// based on whether or not it's Xrayable and/or callable, which breaks
|
||||
// the for/of iteration.
|
||||
//
|
||||
// This code is designed to handle untrusted objects, so we can safely
|
||||
// waive Xrays on the iterable, and relying on the Debugger machinery to
|
||||
// make sure we handle the resulting objects carefully.
|
||||
let rawEntries = Cu.waiveXrays(ThreadSafeChromeUtils.nondeterministicGetWeakMapKeys(raw));
|
||||
|
||||
grip.preview = {
|
||||
kind: "MapLike",
|
||||
size: rawEntries.length,
|
||||
};
|
||||
|
||||
if (hooks.getGripDepth() > 1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let entries = grip.preview.entries = [];
|
||||
for (let key of rawEntries) {
|
||||
let value = Cu.unwaiveXrays(WeakMap.prototype.get.call(raw, key));
|
||||
key = Cu.unwaiveXrays(key);
|
||||
key = makeDebuggeeValueIfNeeded(obj, key);
|
||||
value = makeDebuggeeValueIfNeeded(obj, value);
|
||||
entries.push([hooks.createValueGrip(key),
|
||||
hooks.createValueGrip(value)]);
|
||||
if (entries.length == OBJECT_PREVIEW_MAX_ITEMS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}],
|
||||
|
||||
DOMStringMap: [function({obj, hooks}, grip, rawObj) {
|
||||
if (!rawObj) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let keys = obj.getOwnPropertyNames();
|
||||
grip.preview = {
|
||||
kind: "MapLike",
|
||||
size: keys.length,
|
||||
};
|
||||
|
||||
if (hooks.getGripDepth() > 1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let entries = grip.preview.entries = [];
|
||||
for (let key of keys) {
|
||||
let value = makeDebuggeeValueIfNeeded(obj, rawObj[key]);
|
||||
entries.push([key, hooks.createValueGrip(value)]);
|
||||
if (entries.length == OBJECT_PREVIEW_MAX_ITEMS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}],*/
|
||||
};
|
||||
|
||||
/**
|
||||
* Generic previewer for classes wrapping primitives, like String,
|
||||
* Number and Boolean.
|
||||
*
|
||||
* @param string className
|
||||
* Class name to expect.
|
||||
* @param object classObj
|
||||
* The class to expect, eg. String. The valueOf() method of the class is
|
||||
* invoked on the given object.
|
||||
* @param ObjectActor objectActor
|
||||
* The object actor
|
||||
* @param Object grip
|
||||
* The result grip to fill in
|
||||
* @return Booolean true if the object was handled, false otherwise
|
||||
*/
|
||||
function wrappedPrimitivePreviewer(className, classObj, objectActor, grip) {
|
||||
let {_obj} = objectActor;
|
||||
|
||||
if (!_obj.proto || _obj.proto.class != className) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let raw = _obj.unsafeDereference();
|
||||
let v = null;
|
||||
try {
|
||||
v = classObj.prototype.valueOf.call(raw);
|
||||
} catch (ex) {
|
||||
// valueOf() can throw if the raw JS object is "misbehaved".
|
||||
return false;
|
||||
}
|
||||
|
||||
if (v === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let canHandle = GenericObject(objectActor, grip, className === "String");
|
||||
if (!canHandle) {
|
||||
return false;
|
||||
}
|
||||
|
||||
grip.preview.wrappedValue = objectActor.getGrip(makeDebuggeeValueIfNeeded(_obj, v));
|
||||
return true;
|
||||
}
|
||||
|
||||
function GenericObject(objectActor, grip, specialStringBehavior = false) {
|
||||
let {_obj} = objectActor;
|
||||
if (grip.preview || grip.displayString || objectActor.getGripDepth() > 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let i = 0, names = [];
|
||||
let preview = grip.preview = {
|
||||
kind: "Object",
|
||||
ownProperties: {}//Object.create(null)
|
||||
};
|
||||
|
||||
try {
|
||||
names = _obj.getOwnPropertyNames();
|
||||
} catch (ex) {
|
||||
// Calling getOwnPropertyNames() on some wrapped native prototypes is not
|
||||
// allowed: "cannot modify properties of a WrappedNative". See bug 952093.
|
||||
}
|
||||
|
||||
preview.ownPropertiesLength = names.length;
|
||||
|
||||
let length;
|
||||
if (specialStringBehavior) {
|
||||
length = DevToolsUtils.getProperty(_obj, "length");
|
||||
if (typeof length != "number") {
|
||||
specialStringBehavior = false;
|
||||
}
|
||||
}
|
||||
|
||||
for (let name of names) {
|
||||
if (specialStringBehavior && /^[0-9]+$/.test(name)) {
|
||||
let num = parseInt(name, 10);
|
||||
if (num.toString() === name && num >= 0 && num < length) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let desc = objectActor._propertyDescriptor(name, true);
|
||||
if (!desc) {
|
||||
continue;
|
||||
}
|
||||
|
||||
preview.ownProperties[name] = desc;
|
||||
if (++i == OBJECT_PREVIEW_MAX_ITEMS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (i < OBJECT_PREVIEW_MAX_ITEMS) {
|
||||
preview.safeGetterValues = objectActor._findSafeGetterValues(
|
||||
Object.keys(preview.ownProperties),
|
||||
OBJECT_PREVIEW_MAX_ITEMS - i);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Make a debuggee value for the given object, if needed. Primitive values
|
||||
* are left the same.
|
||||
*
|
||||
* Use case: you have a raw JS object (after unsafe dereference) and you want to
|
||||
* send it to the client. In that case you need to use an ObjectActor which
|
||||
* requires a debuggee value. The Debugger.Object.prototype.makeDebuggeeValue()
|
||||
* method works only for JS objects and functions.
|
||||
*
|
||||
* @param Debugger.Object obj
|
||||
* @param any value
|
||||
* @return object
|
||||
*/
|
||||
function makeDebuggeeValueIfNeeded(obj, value) {
|
||||
if (value && (typeof value == "object" || typeof value == "function")) {
|
||||
return obj.makeDebuggeeValue(value);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
// Preview functions that do not rely on the object class.
|
||||
_ObjectActorPreviewers.Object = [
|
||||
function TypedArray(objectActor, grip) {
|
||||
let {_obj} = objectActor;
|
||||
if (TYPED_ARRAY_CLASSES.indexOf(_obj.class) == -1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let length = DevToolsUtils.getProperty(_obj, "length");
|
||||
if (typeof length != "number") {
|
||||
return false;
|
||||
}
|
||||
|
||||
grip.preview = {
|
||||
kind: "ArrayLike",
|
||||
length: length
|
||||
};
|
||||
|
||||
if (objectActor.getGripDepth() > 1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let raw = _obj.unsafeDereference();
|
||||
//let global = Cu.getGlobalForObject(DebuggerServer);
|
||||
|
||||
let classProto = global[_obj.class].prototype;
|
||||
// The Xray machinery for TypedArrays denies indexed access on the grounds
|
||||
// that it's slow, and advises callers to do a structured clone instead.
|
||||
//let safeView = Cu.cloneInto(classProto.subarray.call(raw, 0,
|
||||
// OBJECT_PREVIEW_MAX_ITEMS), global);
|
||||
let safeView = classProto.subarray.call(raw, 0,
|
||||
OBJECT_PREVIEW_MAX_ITEMS);
|
||||
let items = grip.preview.items = [];
|
||||
for (let i = 0; i < safeView.length; i++) {
|
||||
items.push(safeView[i]);
|
||||
}
|
||||
|
||||
return true;
|
||||
},
|
||||
|
||||
function Error(objectActor, grip) {
|
||||
let {_obj} = objectActor;
|
||||
switch (_obj.class) {
|
||||
case "Error":
|
||||
case "EvalError":
|
||||
case "RangeError":
|
||||
case "ReferenceError":
|
||||
case "SyntaxError":
|
||||
case "TypeError":
|
||||
case "URIError":
|
||||
let name = DevToolsUtils.getProperty(_obj, "name");
|
||||
let msg = DevToolsUtils.getProperty(_obj, "message");
|
||||
let stack = DevToolsUtils.getProperty(_obj, "stack");
|
||||
let fileName = DevToolsUtils.getProperty(_obj, "fileName");
|
||||
let lineNumber = DevToolsUtils.getProperty(_obj, "lineNumber");
|
||||
let columnNumber = DevToolsUtils.getProperty(_obj, "columnNumber");
|
||||
grip.preview = {
|
||||
kind: "Error",
|
||||
name: objectActor.getGrip(name),
|
||||
message: objectActor.getGrip(msg),
|
||||
stack: objectActor.getGrip(stack),
|
||||
fileName: objectActor.getGrip(fileName),
|
||||
lineNumber: objectActor.getGrip(lineNumber),
|
||||
columnNumber: objectActor.getGrip(columnNumber)
|
||||
};
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
},
|
||||
|
||||
/*function CSSMediaRule({obj, hooks}, grip, rawObj) {
|
||||
if (isWorker || !rawObj || !(rawObj instanceof Ci.nsIDOMCSSMediaRule)) {
|
||||
return false;
|
||||
}
|
||||
grip.preview = {
|
||||
kind: "ObjectWithText",
|
||||
text: hooks.createValueGrip(rawObj.conditionText),
|
||||
};
|
||||
return true;
|
||||
},
|
||||
|
||||
function CSSStyleRule({obj, hooks}, grip, rawObj) {
|
||||
if (isWorker || !rawObj || !(rawObj instanceof Ci.nsIDOMCSSStyleRule)) {
|
||||
return false;
|
||||
}
|
||||
grip.preview = {
|
||||
kind: "ObjectWithText",
|
||||
text: hooks.createValueGrip(rawObj.selectorText),
|
||||
};
|
||||
return true;
|
||||
},
|
||||
|
||||
function ObjectWithURL({obj, hooks}, grip, rawObj) {
|
||||
if (isWorker || !rawObj || !(rawObj instanceof Ci.nsIDOMCSSImportRule ||
|
||||
rawObj instanceof Ci.nsIDOMCSSStyleSheet ||
|
||||
rawObj instanceof Ci.nsIDOMLocation ||
|
||||
rawObj instanceof Ci.nsIDOMWindow)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let url;
|
||||
if (rawObj instanceof Ci.nsIDOMWindow && rawObj.location) {
|
||||
url = rawObj.location.href;
|
||||
} else if (rawObj.href) {
|
||||
url = rawObj.href;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
|
||||
grip.preview = {
|
||||
kind: "ObjectWithURL",
|
||||
url: hooks.createValueGrip(url),
|
||||
};
|
||||
|
||||
return true;
|
||||
},*/
|
||||
|
||||
/*function ArrayLike(objectActor, grip, rawObj) {
|
||||
let {_obj} = objectActor;
|
||||
if (isWorker || !rawObj ||
|
||||
obj.class != "DOMStringList" &&
|
||||
obj.class != "DOMTokenList" && !(rawObj instanceof Ci.nsIDOMMozNamedAttrMap ||
|
||||
rawObj instanceof Ci.nsIDOMCSSRuleList ||
|
||||
rawObj instanceof Ci.nsIDOMCSSValueList ||
|
||||
rawObj instanceof Ci.nsIDOMFileList ||
|
||||
rawObj instanceof Ci.nsIDOMFontFaceList ||
|
||||
rawObj instanceof Ci.nsIDOMMediaList ||
|
||||
rawObj instanceof Ci.nsIDOMNodeList ||
|
||||
rawObj instanceof Ci.nsIDOMStyleSheetList)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (typeof rawObj.length != "number") {
|
||||
return false;
|
||||
}
|
||||
|
||||
grip.preview = {
|
||||
kind: "ArrayLike",
|
||||
length: rawObj.length,
|
||||
};
|
||||
|
||||
if (hooks.getGripDepth() > 1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let items = grip.preview.items = [];
|
||||
|
||||
for (let i = 0; i < rawObj.length &&
|
||||
items.length < OBJECT_PREVIEW_MAX_ITEMS; i++) {
|
||||
let value = makeDebuggeeValueIfNeeded(obj, rawObj[i]);
|
||||
items.push(hooks.createValueGrip(value));
|
||||
}
|
||||
|
||||
return true;
|
||||
},*/
|
||||
|
||||
/*function CSSStyleDeclaration({obj, hooks}, grip, rawObj) {
|
||||
if (isWorker || !rawObj || !(rawObj instanceof Ci.nsIDOMCSSStyleDeclaration)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
grip.preview = {
|
||||
kind: "MapLike",
|
||||
size: rawObj.length,
|
||||
};
|
||||
|
||||
let entries = grip.preview.entries = [];
|
||||
|
||||
for (let i = 0; i < OBJECT_PREVIEW_MAX_ITEMS &&
|
||||
i < rawObj.length; i++) {
|
||||
let prop = rawObj[i];
|
||||
let value = rawObj.getPropertyValue(prop);
|
||||
entries.push([prop, hooks.createValueGrip(value)]);
|
||||
}
|
||||
|
||||
return true;
|
||||
},
|
||||
|
||||
function DOMNode({obj, hooks}, grip, rawObj) {
|
||||
if (isWorker || obj.class == "Object" || !rawObj || !(rawObj instanceof Ci.nsIDOMNode)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let preview = grip.preview = {
|
||||
kind: "DOMNode",
|
||||
nodeType: rawObj.nodeType,
|
||||
nodeName: rawObj.nodeName,
|
||||
};
|
||||
|
||||
if (rawObj instanceof Ci.nsIDOMDocument && rawObj.location) {
|
||||
preview.location = hooks.createValueGrip(rawObj.location.href);
|
||||
} else if (rawObj instanceof Ci.nsIDOMDocumentFragment) {
|
||||
preview.childNodesLength = rawObj.childNodes.length;
|
||||
|
||||
if (hooks.getGripDepth() < 2) {
|
||||
preview.childNodes = [];
|
||||
for (let node of rawObj.childNodes) {
|
||||
let actor = hooks.createValueGrip(obj.makeDebuggeeValue(node));
|
||||
preview.childNodes.push(actor);
|
||||
if (preview.childNodes.length == OBJECT_PREVIEW_MAX_ITEMS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (rawObj instanceof Ci.nsIDOMElement) {
|
||||
// Add preview for DOM element attributes.
|
||||
if (rawObj instanceof Ci.nsIDOMHTMLElement) {
|
||||
preview.nodeName = preview.nodeName.toLowerCase();
|
||||
}
|
||||
|
||||
let i = 0;
|
||||
preview.attributes = {};
|
||||
preview.attributesLength = rawObj.attributes.length;
|
||||
for (let attr of rawObj.attributes) {
|
||||
preview.attributes[attr.nodeName] = hooks.createValueGrip(attr.value);
|
||||
if (++i == OBJECT_PREVIEW_MAX_ITEMS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else if (rawObj instanceof Ci.nsIDOMAttr) {
|
||||
preview.value = hooks.createValueGrip(rawObj.value);
|
||||
} else if (rawObj instanceof Ci.nsIDOMText ||
|
||||
rawObj instanceof Ci.nsIDOMComment) {
|
||||
preview.textContent = hooks.createValueGrip(rawObj.textContent);
|
||||
}
|
||||
|
||||
return true;
|
||||
},
|
||||
|
||||
function DOMEvent({obj, hooks}, grip, rawObj) {
|
||||
if (isWorker || !rawObj || !(rawObj instanceof Ci.nsIDOMEvent)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let preview = grip.preview = {
|
||||
kind: "DOMEvent",
|
||||
type: rawObj.type,
|
||||
properties: Object.create(null),
|
||||
};
|
||||
|
||||
if (hooks.getGripDepth() < 2) {
|
||||
let target = obj.makeDebuggeeValue(rawObj.target);
|
||||
preview.target = hooks.createValueGrip(target);
|
||||
}
|
||||
|
||||
let props = [];
|
||||
if (rawObj instanceof Ci.nsIDOMMouseEvent) {
|
||||
props.push("buttons", "clientX", "clientY", "layerX", "layerY");
|
||||
} else if (rawObj instanceof Ci.nsIDOMKeyEvent) {
|
||||
let modifiers = [];
|
||||
if (rawObj.altKey) {
|
||||
modifiers.push("Alt");
|
||||
}
|
||||
if (rawObj.ctrlKey) {
|
||||
modifiers.push("Control");
|
||||
}
|
||||
if (rawObj.metaKey) {
|
||||
modifiers.push("Meta");
|
||||
}
|
||||
if (rawObj.shiftKey) {
|
||||
modifiers.push("Shift");
|
||||
}
|
||||
preview.eventKind = "key";
|
||||
preview.modifiers = modifiers;
|
||||
|
||||
props.push("key", "charCode", "keyCode");
|
||||
} else if (rawObj instanceof Ci.nsIDOMTransitionEvent) {
|
||||
props.push("propertyName", "pseudoElement");
|
||||
} else if (rawObj instanceof Ci.nsIDOMAnimationEvent) {
|
||||
props.push("animationName", "pseudoElement");
|
||||
} else if (rawObj instanceof Ci.nsIDOMClipboardEvent) {
|
||||
props.push("clipboardData");
|
||||
}
|
||||
|
||||
// Add event-specific properties.
|
||||
for (let prop of props) {
|
||||
let value = rawObj[prop];
|
||||
if (value && (typeof value == "object" || typeof value == "function")) {
|
||||
// Skip properties pointing to objects.
|
||||
if (hooks.getGripDepth() > 1) {
|
||||
continue;
|
||||
}
|
||||
value = obj.makeDebuggeeValue(value);
|
||||
}
|
||||
preview.properties[prop] = hooks.createValueGrip(value);
|
||||
}
|
||||
|
||||
// Add any properties we find on the event object.
|
||||
if (!props.length) {
|
||||
let i = 0;
|
||||
for (let prop in rawObj) {
|
||||
let value = rawObj[prop];
|
||||
if (prop == "target" || prop == "type" || value === null ||
|
||||
typeof value == "function") {
|
||||
continue;
|
||||
}
|
||||
if (value && typeof value == "object") {
|
||||
if (hooks.getGripDepth() > 1) {
|
||||
continue;
|
||||
}
|
||||
value = obj.makeDebuggeeValue(value);
|
||||
}
|
||||
preview.properties[prop] = hooks.createValueGrip(value);
|
||||
if (++i == OBJECT_PREVIEW_MAX_ITEMS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
},
|
||||
|
||||
function DOMException({obj, hooks}, grip, rawObj) {
|
||||
if (isWorker || !rawObj || !(rawObj instanceof Ci.nsIDOMDOMException)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
grip.preview = {
|
||||
kind: "DOMException",
|
||||
name: hooks.createValueGrip(rawObj.name),
|
||||
message: hooks.createValueGrip(rawObj.message),
|
||||
code: hooks.createValueGrip(rawObj.code),
|
||||
result: hooks.createValueGrip(rawObj.result),
|
||||
filename: hooks.createValueGrip(rawObj.filename),
|
||||
lineNumber: hooks.createValueGrip(rawObj.lineNumber),
|
||||
columnNumber: hooks.createValueGrip(rawObj.columnNumber),
|
||||
};
|
||||
|
||||
return true;
|
||||
},*/
|
||||
|
||||
/*function PseudoArray({obj, hooks}, grip, rawObj) {
|
||||
let length = 0;
|
||||
|
||||
// Making sure all keys are numbers from 0 to length-1
|
||||
let keys = obj.getOwnPropertyNames();
|
||||
if (keys.length == 0) {
|
||||
return false;
|
||||
}
|
||||
for (let key of keys) {
|
||||
if (isNaN(key) || key != length++) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
grip.preview = {
|
||||
kind: "ArrayLike",
|
||||
length: length,
|
||||
};
|
||||
|
||||
// Avoid recursive object grips.
|
||||
if (hooks.getGripDepth() > 1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
let items = grip.preview.items = [];
|
||||
|
||||
let i = 0;
|
||||
for (let key of keys) {
|
||||
if (rawObj.hasOwnProperty(key) && i++ < OBJECT_PREVIEW_MAX_ITEMS) {
|
||||
let value = makeDebuggeeValueIfNeeded(obj, rawObj[key]);
|
||||
items.push(hooks.createValueGrip(value));
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
},*/
|
||||
|
||||
GenericObject
|
||||
];
|
||||
|
||||
export const ObjectActorPreviewers = _ObjectActorPreviewers;
|
@@ -0,0 +1,538 @@
|
||||
"use strict";
|
||||
import * as DevToolsUtils from 'DevTools/DevToolsUtils.js';
|
||||
|
||||
//todo
|
||||
//if (!isWorker) {
|
||||
// loader.lazyImporter(this, "Parser", "resource://devtools/shared/Parser.jsm");
|
||||
//}
|
||||
|
||||
|
||||
// Provide an easy way to bail out of even attempting an autocompletion
|
||||
// if an object has way too many properties. Protects against large objects
|
||||
// with numeric values that wouldn't be tallied towards MAX_AUTOCOMPLETIONS.
|
||||
export const MAX_AUTOCOMPLETE_ATTEMPTS = 100000;
|
||||
// Prevent iterating over too many properties during autocomplete suggestions.
|
||||
export const MAX_AUTOCOMPLETIONS = 1500;
|
||||
|
||||
const STATE_NORMAL = 0;
|
||||
const STATE_QUOTE = 2;
|
||||
const STATE_DQUOTE = 3;
|
||||
|
||||
const OPEN_BODY = "{[(".split("");
|
||||
const CLOSE_BODY = "}])".split("");
|
||||
const OPEN_CLOSE_BODY = {
|
||||
"{": "}",
|
||||
"[": "]",
|
||||
"(": ")"
|
||||
};
|
||||
|
||||
function hasArrayIndex(str) {
|
||||
return /\[\d+\]$/.test(str);
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyses a given string to find the last statement that is interesting for
|
||||
* later completion.
|
||||
*
|
||||
* @param string aStr
|
||||
* A string to analyse.
|
||||
*
|
||||
* @returns object
|
||||
* If there was an error in the string detected, then a object like
|
||||
*
|
||||
* { err: "ErrorMesssage" }
|
||||
*
|
||||
* is returned, otherwise a object like
|
||||
*
|
||||
* {
|
||||
* state: STATE_NORMAL|STATE_QUOTE|STATE_DQUOTE,
|
||||
* startPos: index of where the last statement begins
|
||||
* }
|
||||
*/
|
||||
function findCompletionBeginning(aStr) {
|
||||
let bodyStack = [];
|
||||
|
||||
let state = STATE_NORMAL;
|
||||
let start = 0;
|
||||
let c;
|
||||
for (let i = 0; i < aStr.length; i++) {
|
||||
c = aStr[i];
|
||||
|
||||
switch (state) {
|
||||
// Normal JS state.
|
||||
case STATE_NORMAL:
|
||||
if (c == '"') {
|
||||
state = STATE_DQUOTE;
|
||||
}
|
||||
else if (c == "'") {
|
||||
state = STATE_QUOTE;
|
||||
}
|
||||
else if (c == ";") {
|
||||
start = i + 1;
|
||||
}
|
||||
else if (c == " ") {
|
||||
start = i + 1;
|
||||
}
|
||||
else if (OPEN_BODY.indexOf(c) != -1) {
|
||||
bodyStack.push({
|
||||
token: c,
|
||||
start: start
|
||||
});
|
||||
start = i + 1;
|
||||
}
|
||||
else if (CLOSE_BODY.indexOf(c) != -1) {
|
||||
var last = bodyStack.pop();
|
||||
if (!last || OPEN_CLOSE_BODY[last.token] != c) {
|
||||
return {
|
||||
err: "syntax error"
|
||||
};
|
||||
}
|
||||
if (c == "}") {
|
||||
start = i + 1;
|
||||
}
|
||||
else {
|
||||
start = last.start;
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
// Double quote state > " <
|
||||
case STATE_DQUOTE:
|
||||
if (c == "\\") {
|
||||
i++;
|
||||
}
|
||||
else if (c == "\n") {
|
||||
return {
|
||||
err: "unterminated string literal"
|
||||
};
|
||||
}
|
||||
else if (c == '"') {
|
||||
state = STATE_NORMAL;
|
||||
}
|
||||
break;
|
||||
|
||||
// Single quote state > ' <
|
||||
case STATE_QUOTE:
|
||||
if (c == "\\") {
|
||||
i++;
|
||||
}
|
||||
else if (c == "\n") {
|
||||
return {
|
||||
err: "unterminated string literal"
|
||||
};
|
||||
}
|
||||
else if (c == "'") {
|
||||
state = STATE_NORMAL;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
state: state,
|
||||
startPos: start
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides a list of properties, that are possible matches based on the passed
|
||||
* Debugger.Environment/Debugger.Object and inputValue.
|
||||
*
|
||||
* @param object aDbgObject
|
||||
* When the debugger is not paused this Debugger.Object wraps the scope for autocompletion.
|
||||
* It is null if the debugger is paused.
|
||||
* @param object anEnvironment
|
||||
* When the debugger is paused this Debugger.Environment is the scope for autocompletion.
|
||||
* It is null if the debugger is not paused.
|
||||
* @param string aInputValue
|
||||
* Value that should be completed.
|
||||
* @param number [aCursor=aInputValue.length]
|
||||
* Optional offset in the input where the cursor is located. If this is
|
||||
* omitted then the cursor is assumed to be at the end of the input
|
||||
* value.
|
||||
* @returns null or object
|
||||
* If no completion valued could be computed, null is returned,
|
||||
* otherwise a object with the following form is returned:
|
||||
* {
|
||||
* matches: [ string, string, string ],
|
||||
* matchProp: Last part of the inputValue that was used to find
|
||||
* the matches-strings.
|
||||
* }
|
||||
*/
|
||||
export function JSPropertyProvider(aDbgObject, anEnvironment, aInputValue, aCursor) {
|
||||
if (aCursor === undefined) {
|
||||
aCursor = aInputValue.length;
|
||||
}
|
||||
|
||||
let inputValue = aInputValue.substring(0, aCursor);
|
||||
|
||||
// Analyse the inputValue and find the beginning of the last part that
|
||||
// should be completed.
|
||||
let beginning = findCompletionBeginning(inputValue);
|
||||
|
||||
// There was an error analysing the string.
|
||||
if (beginning.err) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// If the current state is not STATE_NORMAL, then we are inside of an string
|
||||
// which means that no completion is possible.
|
||||
if (beginning.state != STATE_NORMAL) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let completionPart = inputValue.substring(beginning.startPos);
|
||||
let lastDot = completionPart.lastIndexOf(".");
|
||||
|
||||
// Don't complete on just an empty string.
|
||||
if (completionPart.trim() == "") {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Catch literals like [1,2,3] or "foo" and return the matches from
|
||||
// their prototypes.
|
||||
// Don't run this is a worker, migrating to acorn should allow this
|
||||
// to run in a worker - Bug 1217198.
|
||||
//todo
|
||||
/*if (!isWorker && lastDot > 0) {
|
||||
let parser = new Parser();
|
||||
parser.logExceptions = false;
|
||||
let syntaxTree = parser.get(completionPart.slice(0, lastDot));
|
||||
let lastTree = syntaxTree.getLastSyntaxTree();
|
||||
let lastBody = lastTree && lastTree.AST.body[lastTree.AST.body.length - 1];
|
||||
|
||||
// Finding the last expression since we've sliced up until the dot.
|
||||
// If there were parse errors this won't exist.
|
||||
if (lastBody) {
|
||||
let expression = lastBody.expression;
|
||||
let matchProp = completionPart.slice(lastDot + 1);
|
||||
if (expression.type === "ArrayExpression") {
|
||||
return getMatchedProps(Array.prototype, matchProp);
|
||||
} else if (expression.type === "Literal" &&
|
||||
(typeof expression.value === "string")) {
|
||||
return getMatchedProps(String.prototype, matchProp);
|
||||
}
|
||||
}
|
||||
}*/
|
||||
|
||||
// We are completing a variable / a property lookup.
|
||||
let properties = completionPart.split(".");
|
||||
let matchProp = properties.pop().trimLeft();
|
||||
let obj = aDbgObject;
|
||||
|
||||
// The first property must be found in the environment of the paused debugger
|
||||
// or of the global lexical scope.
|
||||
let env = anEnvironment || obj.asEnvironment();
|
||||
|
||||
if (properties.length === 0) {
|
||||
return getMatchedPropsInEnvironment(env, matchProp);
|
||||
}
|
||||
|
||||
let firstProp = properties.shift().trim();
|
||||
if (firstProp === "this") {
|
||||
// Special case for 'this' - try to get the Object from the Environment.
|
||||
// No problem if it throws, we will just not autocomplete.
|
||||
try {
|
||||
obj = env.object;
|
||||
} catch (e) {
|
||||
}
|
||||
}
|
||||
else if (hasArrayIndex(firstProp)) {
|
||||
obj = getArrayMemberProperty(null, env, firstProp);
|
||||
} else {
|
||||
obj = getVariableInEnvironment(env, firstProp);
|
||||
}
|
||||
|
||||
if (!isObjectUsable(obj)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// We get the rest of the properties recursively starting from the Debugger.Object
|
||||
// that wraps the first property
|
||||
for (let i = 0; i < properties.length; i++) {
|
||||
let prop = properties[i].trim();
|
||||
if (!prop) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (hasArrayIndex(prop)) {
|
||||
// The property to autocomplete is a member of array. For example
|
||||
// list[i][j]..[n]. Traverse the array to get the actual element.
|
||||
obj = getArrayMemberProperty(obj, null, prop);
|
||||
}
|
||||
else {
|
||||
obj = DevToolsUtils.getProperty(obj, prop);
|
||||
}
|
||||
|
||||
if (!isObjectUsable(obj)) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// If the final property is a primitive
|
||||
if (typeof obj != "object") {
|
||||
return getMatchedProps(obj, matchProp);
|
||||
}
|
||||
|
||||
return getMatchedPropsInDbgObject(obj, matchProp);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the array member of aObj for the given aProp. For example, given
|
||||
* aProp='list[0][1]' the element at [0][1] of aObj.list is returned.
|
||||
*
|
||||
* @param object aObj
|
||||
* The object to operate on. Should be null if aEnv is passed.
|
||||
* @param object aEnv
|
||||
* The Environment to operate in. Should be null if aObj is passed.
|
||||
* @param string aProp
|
||||
* The property to return.
|
||||
* @return null or Object
|
||||
* Returns null if the property couldn't be located. Otherwise the array
|
||||
* member identified by aProp.
|
||||
*/
|
||||
function getArrayMemberProperty(aObj, aEnv, aProp) {
|
||||
// First get the array.
|
||||
let obj = aObj;
|
||||
let propWithoutIndices = aProp.substr(0, aProp.indexOf("["));
|
||||
|
||||
if (aEnv) {
|
||||
obj = getVariableInEnvironment(aEnv, propWithoutIndices);
|
||||
} else {
|
||||
obj = DevToolsUtils.getProperty(obj, propWithoutIndices);
|
||||
}
|
||||
|
||||
if (!isObjectUsable(obj)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Then traverse the list of indices to get the actual element.
|
||||
let result;
|
||||
let arrayIndicesRegex = /\[[^\]]*\]/g;
|
||||
while ((result = arrayIndicesRegex.exec(aProp)) !== null) {
|
||||
let indexWithBrackets = result[0];
|
||||
let indexAsText = indexWithBrackets.substr(1, indexWithBrackets.length - 2);
|
||||
let index = parseInt(indexAsText);
|
||||
|
||||
if (isNaN(index)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
obj = DevToolsUtils.getProperty(obj, index);
|
||||
|
||||
if (!isObjectUsable(obj)) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the given Debugger.Object can be used for autocomplete.
|
||||
*
|
||||
* @param Debugger.Object aObject
|
||||
* The Debugger.Object to check.
|
||||
* @return boolean
|
||||
* True if further inspection into the object is possible, or false
|
||||
* otherwise.
|
||||
*/
|
||||
function isObjectUsable(aObject) {
|
||||
if (aObject == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (typeof aObject == "object" && aObject.class == "DeadObject") {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see getExactMatch_impl()
|
||||
*/
|
||||
function getVariableInEnvironment(anEnvironment, aName) {
|
||||
return getExactMatch_impl(anEnvironment, aName, DebuggerEnvironmentSupport);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see getMatchedProps_impl()
|
||||
*/
|
||||
function getMatchedPropsInEnvironment(anEnvironment, aMatch) {
|
||||
return getMatchedProps_impl(anEnvironment, aMatch, DebuggerEnvironmentSupport);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see getMatchedProps_impl()
|
||||
*/
|
||||
function getMatchedPropsInDbgObject(aDbgObject, aMatch) {
|
||||
return getMatchedProps_impl(aDbgObject, aMatch, DebuggerObjectSupport);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see getMatchedProps_impl()
|
||||
*/
|
||||
function getMatchedProps(aObj, aMatch) {
|
||||
if (typeof aObj != "object") {
|
||||
aObj = aObj.constructor.prototype;
|
||||
}
|
||||
return getMatchedProps_impl(aObj, aMatch, JSObjectSupport);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all properties in the given object (and its parent prototype chain) that
|
||||
* match a given prefix.
|
||||
*
|
||||
* @param mixed aObj
|
||||
* Object whose properties we want to filter.
|
||||
* @param string aMatch
|
||||
* Filter for properties that match this string.
|
||||
* @return object
|
||||
* Object that contains the matchProp and the list of names.
|
||||
*/
|
||||
function getMatchedProps_impl(aObj, aMatch, {chainIterator, getProperties}) {
|
||||
let matches = new Set();
|
||||
let numProps = 0;
|
||||
|
||||
// We need to go up the prototype chain.
|
||||
let iter = chainIterator(aObj);
|
||||
for (let obj of iter) {
|
||||
let props = getProperties(obj);
|
||||
numProps += props.length;
|
||||
|
||||
// If there are too many properties to event attempt autocompletion,
|
||||
// or if we have already added the max number, then stop looping
|
||||
// and return the partial set that has already been discovered.
|
||||
if (numProps >= MAX_AUTOCOMPLETE_ATTEMPTS ||
|
||||
matches.size >= MAX_AUTOCOMPLETIONS) {
|
||||
break;
|
||||
}
|
||||
|
||||
for (let i = 0; i < props.length; i++) {
|
||||
let prop = props[i];
|
||||
if (prop.indexOf(aMatch) != 0) {
|
||||
continue;
|
||||
}
|
||||
if (prop.indexOf('-') > -1) {
|
||||
continue;
|
||||
}
|
||||
// If it is an array index, we can't take it.
|
||||
// This uses a trick: converting a string to a number yields NaN if
|
||||
// the operation failed, and NaN is not equal to itself.
|
||||
if (+prop != +prop) {
|
||||
matches.add(prop);
|
||||
}
|
||||
|
||||
if (matches.size >= MAX_AUTOCOMPLETIONS) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
matchProp: aMatch,
|
||||
matches: [...matches],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a property value based on its name from the given object, by
|
||||
* recursively checking the object's prototype.
|
||||
*
|
||||
* @param object aObj
|
||||
* An object to look the property into.
|
||||
* @param string aName
|
||||
* The property that is looked up.
|
||||
* @returns object|undefined
|
||||
* A Debugger.Object if the property exists in the object's prototype
|
||||
* chain, undefined otherwise.
|
||||
*/
|
||||
function getExactMatch_impl(aObj, aName, {chainIterator, getProperty}) {
|
||||
// We need to go up the prototype chain.
|
||||
let iter = chainIterator(aObj);
|
||||
for (let obj of iter) {
|
||||
let prop = getProperty(obj, aName, aObj);
|
||||
if (prop) {
|
||||
return prop.value;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
|
||||
var JSObjectSupport = {
|
||||
chainIterator: function*(aObj) {
|
||||
while (aObj) {
|
||||
yield aObj;
|
||||
aObj = Object.getPrototypeOf(aObj);
|
||||
}
|
||||
},
|
||||
|
||||
getProperties: function (aObj) {
|
||||
return Object.getOwnPropertyNames(aObj);
|
||||
},
|
||||
|
||||
getProperty: function () {
|
||||
// getProperty is unsafe with raw JS objects.
|
||||
throw "Unimplemented!";
|
||||
},
|
||||
};
|
||||
|
||||
var DebuggerObjectSupport = {
|
||||
chainIterator: function*(aObj) {
|
||||
while (aObj) {
|
||||
yield aObj;
|
||||
aObj = aObj.proto;
|
||||
}
|
||||
},
|
||||
|
||||
getProperties: function (aObj) {
|
||||
return aObj.getOwnPropertyNames();
|
||||
},
|
||||
|
||||
getProperty: function (aObj, aName, aRootObj) {
|
||||
// This is left unimplemented in favor to DevToolsUtils.getProperty().
|
||||
throw "Unimplemented!";
|
||||
},
|
||||
};
|
||||
|
||||
var DebuggerEnvironmentSupport = {
|
||||
chainIterator: function*(aObj) {
|
||||
while (aObj) {
|
||||
yield aObj;
|
||||
aObj = aObj.parent;
|
||||
}
|
||||
},
|
||||
|
||||
getProperties: function (aObj) {
|
||||
let names = aObj.names();
|
||||
|
||||
// Include 'this' in results (in sorted order)
|
||||
for (let i = 0; i < names.length; i++) {
|
||||
if (i === names.length - 1 || names[i + 1] > "this") {
|
||||
names.splice(i + 1, 0, "this");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return names;
|
||||
},
|
||||
|
||||
getProperty: function (aObj, aName) {
|
||||
let result;
|
||||
// Try/catch since aName can be anything, and getVariable throws if
|
||||
// it's not a valid ECMAScript identifier name
|
||||
try {
|
||||
// TODO: we should use getVariableDescriptor() here - bug 725815.
|
||||
result = aObj.getVariable(aName);
|
||||
} catch (e) {
|
||||
}
|
||||
|
||||
// FIXME: Need actual UI, bug 941287.
|
||||
if (result === undefined || result.optimizedOut || result.missingArguments) {
|
||||
return null;
|
||||
}
|
||||
return {value: result};
|
||||
}
|
||||
};
|
139
contrib/mORMot/SyNode/core_modules/DevTools/stringify.js
Normal file
139
contrib/mORMot/SyNode/core_modules/DevTools/stringify.js
Normal file
@@ -0,0 +1,139 @@
|
||||
import * as DevToolsUtils from 'DevTools/DevToolsUtils.js';
|
||||
|
||||
/**
|
||||
* Stringify a Debugger.Object based on its class.
|
||||
*
|
||||
* @param Debugger.Object obj
|
||||
* The object to stringify.
|
||||
* @return String
|
||||
* The stringification for the object.
|
||||
*/
|
||||
export function stringify(obj) {
|
||||
if (obj.class == "DeadObject") {
|
||||
const error = new Error("Dead object encountered.");
|
||||
DevToolsUtils.reportException("stringify", error);
|
||||
return "<dead object>";
|
||||
}
|
||||
|
||||
const stringifier = stringifiers[obj.class] || stringifiers.Object;
|
||||
|
||||
try {
|
||||
return stringifier(obj);
|
||||
} catch (e) {
|
||||
DevToolsUtils.reportException("stringify", e);
|
||||
return "<failed to stringify object>";
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Determine if a given value is non-primitive.
|
||||
*
|
||||
* @param Any value
|
||||
* The value to test.
|
||||
* @return Boolean
|
||||
* Whether the value is non-primitive.
|
||||
*/
|
||||
function isObject(value) {
|
||||
const type = typeof value;
|
||||
return type == "object" ? value !== null : type == "function";
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a function that can safely stringify Debugger.Objects of a given
|
||||
* builtin type.
|
||||
*
|
||||
* @param Function ctor
|
||||
* The builtin class constructor.
|
||||
* @return Function
|
||||
* The stringifier for the class.
|
||||
*/
|
||||
function createBuiltinStringifier(ctor) {
|
||||
return obj => ctor.prototype.toString.call(obj.unsafeDereference());
|
||||
}
|
||||
|
||||
/**
|
||||
* Stringify a Debugger.Object-wrapped Error instance.
|
||||
*
|
||||
* @param Debugger.Object obj
|
||||
* The object to stringify.
|
||||
* @return String
|
||||
* The stringification of the object.
|
||||
*/
|
||||
function errorStringify(obj) {
|
||||
let name = DevToolsUtils.getProperty(obj, "name");
|
||||
if (name === "" || name === undefined) {
|
||||
name = obj.class;
|
||||
} else if (isObject(name)) {
|
||||
name = stringify(name);
|
||||
}
|
||||
|
||||
let message = DevToolsUtils.getProperty(obj, "message");
|
||||
if (isObject(message)) {
|
||||
message = stringify(message);
|
||||
}
|
||||
|
||||
if (message === "" || message === undefined) {
|
||||
return name;
|
||||
}
|
||||
return name + ": " + message;
|
||||
}
|
||||
|
||||
// Used to prevent infinite recursion when an array is found inside itself.
|
||||
let seen = null;
|
||||
|
||||
const stringifiers = {
|
||||
Error: errorStringify,
|
||||
EvalError: errorStringify,
|
||||
RangeError: errorStringify,
|
||||
ReferenceError: errorStringify,
|
||||
SyntaxError: errorStringify,
|
||||
TypeError: errorStringify,
|
||||
URIError: errorStringify,
|
||||
Boolean: createBuiltinStringifier(Boolean),
|
||||
Function: createBuiltinStringifier(Function),
|
||||
Number: createBuiltinStringifier(Number),
|
||||
RegExp: createBuiltinStringifier(RegExp),
|
||||
String: createBuiltinStringifier(String),
|
||||
Object: obj => "[object " + obj.class + "]",
|
||||
Array: obj => {
|
||||
// If we're at the top level then we need to create the Set for tracking
|
||||
// previously stringified arrays.
|
||||
const topLevel = !seen;
|
||||
if (topLevel) {
|
||||
seen = new Set();
|
||||
} else if (seen.has(obj)) {
|
||||
return "";
|
||||
}
|
||||
|
||||
seen.add(obj);
|
||||
|
||||
const len = DevToolsUtils.getProperty(obj, "length");
|
||||
let string = "";
|
||||
|
||||
// The following check is only required because the debuggee could possibly
|
||||
// be a Proxy and return any value. For normal objects, array.length is
|
||||
// always a non-negative integer.
|
||||
if (typeof len == "number" && len > 0) {
|
||||
for (let i = 0; i < len; i++) {
|
||||
const desc = obj.getOwnPropertyDescriptor(i);
|
||||
if (desc) {
|
||||
const { value } = desc;
|
||||
if (value != null) {
|
||||
string += isObject(value) ? stringify(value) : value;
|
||||
}
|
||||
}
|
||||
|
||||
if (i < len - 1) {
|
||||
string += ",";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (topLevel) {
|
||||
seen = null;
|
||||
}
|
||||
|
||||
return string;
|
||||
}
|
||||
};
|
||||
|
48
contrib/mORMot/SyNode/core_modules/ModuleLoader.js
Normal file
48
contrib/mORMot/SyNode/core_modules/ModuleLoader.js
Normal file
@@ -0,0 +1,48 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
// A basic synchronous module loader for testing the shell.
|
||||
let {coreModulesPath, parseModule, setModuleResolveHook, parseModuleRes, _coreModulesInRes} = process.binding('modules');
|
||||
let {loadFile, relToAbs} = process.binding('fs');
|
||||
|
||||
Reflect.Loader = new class {
|
||||
constructor() {
|
||||
this.registry = new Map();
|
||||
this.loadPath = coreModulesPath;
|
||||
}
|
||||
|
||||
resolve(name) {
|
||||
return relToAbs(this.loadPath, name);
|
||||
}
|
||||
|
||||
fetch(path) {
|
||||
//return os.file.readFile(path);
|
||||
return loadFile(path);
|
||||
}
|
||||
|
||||
loadAndParse(name) {
|
||||
let path = _coreModulesInRes ? name.toUpperCase() : this.resolve(name);
|
||||
|
||||
if (this.registry.has(path))
|
||||
return this.registry.get(path);
|
||||
|
||||
let module;
|
||||
if (_coreModulesInRes) {
|
||||
module = parseModuleRes(path);
|
||||
} else {
|
||||
let source = this.fetch(path);
|
||||
module = parseModule(source, path);
|
||||
}
|
||||
this.registry.set(path, module);
|
||||
return module;
|
||||
}
|
||||
|
||||
["import"](name, referrer) {
|
||||
let module = this.loadAndParse(name);
|
||||
module.declarationInstantiation();
|
||||
return module.evaluation();
|
||||
}
|
||||
};
|
||||
setModuleResolveHook((module, requestName) => Reflect.Loader.loadAndParse(requestName));
|
||||
|
1
contrib/mORMot/SyNode/core_modules/core_modules
Normal file
1
contrib/mORMot/SyNode/core_modules/core_modules
Normal file
@@ -0,0 +1 @@
|
||||
../../../../../libs/Synopse/SyNode/core_modules
|
57
contrib/mORMot/SyNode/core_modules/node_modules/_stream_duplex.js
generated
vendored
Normal file
57
contrib/mORMot/SyNode/core_modules/node_modules/_stream_duplex.js
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
// a duplex stream is just a stream that is both readable and writable.
|
||||
// Since JS doesn't have multiple prototypal inheritance, this class
|
||||
// prototypally inherits from Readable, and then parasitically from
|
||||
// Writable.
|
||||
|
||||
'use strict';
|
||||
|
||||
module.exports = Duplex;
|
||||
|
||||
const util = require('util');
|
||||
const Readable = require('_stream_readable');
|
||||
const Writable = require('_stream_writable');
|
||||
|
||||
util.inherits(Duplex, Readable);
|
||||
|
||||
var keys = Object.keys(Writable.prototype);
|
||||
for (var v = 0; v < keys.length; v++) {
|
||||
var method = keys[v];
|
||||
if (!Duplex.prototype[method])
|
||||
Duplex.prototype[method] = Writable.prototype[method];
|
||||
}
|
||||
|
||||
function Duplex(options) {
|
||||
if (!(this instanceof Duplex))
|
||||
return new Duplex(options);
|
||||
|
||||
Readable.call(this, options);
|
||||
Writable.call(this, options);
|
||||
|
||||
if (options && options.readable === false)
|
||||
this.readable = false;
|
||||
|
||||
if (options && options.writable === false)
|
||||
this.writable = false;
|
||||
|
||||
this.allowHalfOpen = true;
|
||||
if (options && options.allowHalfOpen === false)
|
||||
this.allowHalfOpen = false;
|
||||
|
||||
this.once('end', onend);
|
||||
}
|
||||
|
||||
// the no-half-open enforcer
|
||||
function onend() {
|
||||
// if we allow half-open state, or if the writable side ended,
|
||||
// then we're ok.
|
||||
if (this.allowHalfOpen || this._writableState.ended)
|
||||
return;
|
||||
|
||||
// no more data can be written.
|
||||
// But allow more writes to happen in this tick.
|
||||
process.nextTick(onEndNT, this);
|
||||
}
|
||||
|
||||
function onEndNT(self) {
|
||||
self.end();
|
||||
}
|
22
contrib/mORMot/SyNode/core_modules/node_modules/_stream_passthrough.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.

'use strict';

module.exports = PassThrough;

const Transform = require('_stream_transform');
const util = require('util');
util.inherits(PassThrough, Transform);

function PassThrough(options) {
  if (!(this instanceof PassThrough))
    return new PassThrough(options);

  Transform.call(this, options);
}

PassThrough.prototype._transform = function(chunk, encoding, cb) {
  cb(null, chunk);
};
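// --- Usage sketch (illustrative only, not part of the vendored file) ---
// PassThrough forwards every chunk unchanged, which makes it handy as a tap
// between two piped streams. Assumes require() resolves the module name and
// that a console binding is available.
const PassThroughSketch = require('_stream_passthrough');

const tap = new PassThroughSketch();
tap.on('data', (chunk) => console.log('passing through:', chunk.toString()));
tap.write('unchanged');
tap.end();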
976
contrib/mORMot/SyNode/core_modules/node_modules/_stream_readable.js
generated
vendored
Normal file
@@ -0,0 +1,976 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = Readable;
|
||||
Readable.ReadableState = ReadableState;
|
||||
|
||||
const EE = require('events');
|
||||
const Stream = require('stream');
|
||||
const Buffer = require('buffer').Buffer;
|
||||
const util = require('util');
|
||||
const debug = util.debuglog('stream');
|
||||
const BufferList = require('internal/streams/BufferList');
|
||||
var StringDecoder;
|
||||
|
||||
util.inherits(Readable, Stream);
|
||||
|
||||
function prependListener(emitter, event, fn) {
|
||||
// Sadly this is not cacheable as some libraries bundle their own
|
||||
// event emitter implementation with them.
|
||||
if (typeof emitter.prependListener === 'function') {
|
||||
return emitter.prependListener(event, fn);
|
||||
} else {
|
||||
// This is a hack to make sure that our error handler is attached before any
|
||||
// userland ones. NEVER DO THIS. This is here only because this code needs
|
||||
// to continue to work with older versions of Node.js that do not include
|
||||
// the prependListener() method. The goal is to eventually remove this hack.
|
||||
if (!emitter._events || !emitter._events[event])
|
||||
emitter.on(event, fn);
|
||||
else if (Array.isArray(emitter._events[event]))
|
||||
emitter._events[event].unshift(fn);
|
||||
else
|
||||
emitter._events[event] = [fn, emitter._events[event]];
|
||||
}
|
||||
}
|
||||
|
||||
function ReadableState(options, stream) {
|
||||
options = options || {};
|
||||
|
||||
// object stream flag. Used to make read(n) ignore n and to
|
||||
// make all the buffer merging and length checks go away
|
||||
this.objectMode = !!options.objectMode;
|
||||
|
||||
if (stream instanceof Stream.Duplex)
|
||||
this.objectMode = this.objectMode || !!options.readableObjectMode;
|
||||
|
||||
// the point at which it stops calling _read() to fill the buffer
|
||||
// Note: 0 is a valid value, means "don't call _read preemptively ever"
|
||||
var hwm = options.highWaterMark;
|
||||
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
|
||||
this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
|
||||
|
||||
// cast to ints.
|
||||
this.highWaterMark = ~~this.highWaterMark;
|
||||
|
||||
// A linked list is used to store data chunks instead of an array because the
|
||||
// linked list can remove elements from the beginning faster than
|
||||
// array.shift()
|
||||
this.buffer = new BufferList();
|
||||
this.length = 0;
|
||||
this.pipes = null;
|
||||
this.pipesCount = 0;
|
||||
this.flowing = null;
|
||||
this.ended = false;
|
||||
this.endEmitted = false;
|
||||
this.reading = false;
|
||||
|
||||
// a flag to be able to tell if the onwrite cb is called immediately,
|
||||
// or on a later tick. We set this to true at first, because any
|
||||
// actions that shouldn't happen until "later" should generally also
|
||||
// not happen before the first write call.
|
||||
this.sync = true;
|
||||
|
||||
// whenever we return null, then we set a flag to say
|
||||
// that we're awaiting a 'readable' event emission.
|
||||
this.needReadable = false;
|
||||
this.emittedReadable = false;
|
||||
this.readableListening = false;
|
||||
this.resumeScheduled = false;
|
||||
|
||||
// Crypto is kind of old and crusty. Historically, its default string
|
||||
// encoding is 'binary' so we have to make this configurable.
|
||||
// Everything else in the universe uses 'utf8', though.
|
||||
this.defaultEncoding = options.defaultEncoding || 'utf8';
|
||||
|
||||
// when piping, we only care about 'readable' events that happen
|
||||
// after read()ing all the bytes and not getting any pushback.
|
||||
this.ranOut = false;
|
||||
|
||||
// the number of writers that are awaiting a drain event in .pipe()s
|
||||
this.awaitDrain = 0;
|
||||
|
||||
// if true, a maybeReadMore has been scheduled
|
||||
this.readingMore = false;
|
||||
|
||||
this.decoder = null;
|
||||
this.encoding = null;
|
||||
if (options.encoding) {
|
||||
if (!StringDecoder)
|
||||
StringDecoder = require('string_decoder').StringDecoder;
|
||||
this.decoder = new StringDecoder(options.encoding);
|
||||
this.encoding = options.encoding;
|
||||
}
|
||||
}
|
||||
|
||||
function Readable(options) {
|
||||
if (!(this instanceof Readable))
|
||||
return new Readable(options);
|
||||
|
||||
this._readableState = new ReadableState(options, this);
|
||||
|
||||
// legacy
|
||||
this.readable = true;
|
||||
|
||||
if (options && typeof options.read === 'function')
|
||||
this._read = options.read;
|
||||
|
||||
Stream.call(this);
|
||||
}
|
||||
|
||||
// Manually shove something into the read() buffer.
|
||||
// This returns true if the highWaterMark has not been hit yet,
|
||||
// similar to how Writable.write() returns true if you should
|
||||
// write() some more.
|
||||
Readable.prototype.push = function(chunk, encoding) {
|
||||
var state = this._readableState;
|
||||
|
||||
if (!state.objectMode && typeof chunk === 'string') {
|
||||
encoding = encoding || state.defaultEncoding;
|
||||
if (encoding !== state.encoding) {
|
||||
chunk = Buffer.from(chunk, encoding);
|
||||
encoding = '';
|
||||
}
|
||||
}
|
||||
|
||||
return readableAddChunk(this, state, chunk, encoding, false);
|
||||
};
|
||||
|
||||
// Unshift should *always* be something directly out of read()
|
||||
Readable.prototype.unshift = function(chunk) {
|
||||
var state = this._readableState;
|
||||
return readableAddChunk(this, state, chunk, '', true);
|
||||
};
|
||||
|
||||
Readable.prototype.isPaused = function() {
|
||||
return this._readableState.flowing === false;
|
||||
};
|
||||
|
||||
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
|
||||
var er = chunkInvalid(state, chunk);
|
||||
if (er) {
|
||||
stream.emit('error', er);
|
||||
} else if (chunk === null) {
|
||||
state.reading = false;
|
||||
onEofChunk(stream, state);
|
||||
} else if (state.objectMode || chunk && chunk.length > 0) {
|
||||
if (state.ended && !addToFront) {
|
||||
const e = new Error('stream.push() after EOF');
|
||||
stream.emit('error', e);
|
||||
} else if (state.endEmitted && addToFront) {
|
||||
const e = new Error('stream.unshift() after end event');
|
||||
stream.emit('error', e);
|
||||
} else {
|
||||
var skipAdd;
|
||||
if (state.decoder && !addToFront && !encoding) {
|
||||
chunk = state.decoder.write(chunk);
|
||||
skipAdd = (!state.objectMode && chunk.length === 0);
|
||||
}
|
||||
|
||||
if (!addToFront)
|
||||
state.reading = false;
|
||||
|
||||
// Don't add to the buffer if we've decoded to an empty string chunk and
|
||||
// we're not in object mode
|
||||
if (!skipAdd) {
|
||||
// if we want the data now, just emit it.
|
||||
if (state.flowing && state.length === 0 && !state.sync) {
|
||||
stream.emit('data', chunk);
|
||||
stream.read(0);
|
||||
} else {
|
||||
// update the buffer info.
|
||||
state.length += state.objectMode ? 1 : chunk.length;
|
||||
if (addToFront)
|
||||
state.buffer.unshift(chunk);
|
||||
else
|
||||
state.buffer.push(chunk);
|
||||
|
||||
if (state.needReadable)
|
||||
emitReadable(stream);
|
||||
}
|
||||
}
|
||||
|
||||
maybeReadMore(stream, state);
|
||||
}
|
||||
} else if (!addToFront) {
|
||||
state.reading = false;
|
||||
}
|
||||
|
||||
return needMoreData(state);
|
||||
}
|
||||
|
||||
|
||||
// if it's past the high water mark, we can push in some more.
|
||||
// Also, if we have no data yet, we can stand some
|
||||
// more bytes. This is to work around cases where hwm=0,
|
||||
// such as the repl. Also, if the push() triggered a
|
||||
// readable event, and the user called read(largeNumber) such that
|
||||
// needReadable was set, then we ought to push more, so that another
|
||||
// 'readable' event will be triggered.
|
||||
function needMoreData(state) {
|
||||
return !state.ended &&
|
||||
(state.needReadable ||
|
||||
state.length < state.highWaterMark ||
|
||||
state.length === 0);
|
||||
}
|
||||
|
||||
// backwards compatibility.
|
||||
Readable.prototype.setEncoding = function(enc) {
|
||||
if (!StringDecoder)
|
||||
StringDecoder = require('string_decoder').StringDecoder;
|
||||
this._readableState.decoder = new StringDecoder(enc);
|
||||
this._readableState.encoding = enc;
|
||||
return this;
|
||||
};
|
||||
|
||||
// Don't raise the hwm > 8MB
|
||||
const MAX_HWM = 0x800000;
|
||||
function computeNewHighWaterMark(n) {
|
||||
if (n >= MAX_HWM) {
|
||||
n = MAX_HWM;
|
||||
} else {
|
||||
// Get the next highest power of 2 to prevent increasing hwm excessively in
|
||||
// tiny amounts
|
||||
n--;
|
||||
n |= n >>> 1;
|
||||
n |= n >>> 2;
|
||||
n |= n >>> 4;
|
||||
n |= n >>> 8;
|
||||
n |= n >>> 16;
|
||||
n++;
|
||||
}
|
||||
return n;
|
||||
}
|
||||
|
||||
// This function is designed to be inlinable, so please take care when making
|
||||
// changes to the function body.
|
||||
function howMuchToRead(n, state) {
|
||||
if (n <= 0 || (state.length === 0 && state.ended))
|
||||
return 0;
|
||||
if (state.objectMode)
|
||||
return 1;
|
||||
if (n !== n) {
|
||||
// Only flow one buffer at a time
|
||||
if (state.flowing && state.length)
|
||||
return state.buffer.head.data.length;
|
||||
else
|
||||
return state.length;
|
||||
}
|
||||
// If we're asking for more than the current hwm, then raise the hwm.
|
||||
if (n > state.highWaterMark)
|
||||
state.highWaterMark = computeNewHighWaterMark(n);
|
||||
if (n <= state.length)
|
||||
return n;
|
||||
// Don't have enough
|
||||
if (!state.ended) {
|
||||
state.needReadable = true;
|
||||
return 0;
|
||||
}
|
||||
return state.length;
|
||||
}
|
||||
|
||||
// you can override either this method, or the async _read(n) below.
|
||||
Readable.prototype.read = function(n) {
|
||||
debug('read', n);
|
||||
n = parseInt(n, 10);
|
||||
var state = this._readableState;
|
||||
var nOrig = n;
|
||||
|
||||
if (n !== 0)
|
||||
state.emittedReadable = false;
|
||||
|
||||
// if we're doing read(0) to trigger a readable event, but we
|
||||
// already have a bunch of data in the buffer, then just trigger
|
||||
// the 'readable' event and move on.
|
||||
if (n === 0 &&
|
||||
state.needReadable &&
|
||||
(state.length >= state.highWaterMark || state.ended)) {
|
||||
debug('read: emitReadable', state.length, state.ended);
|
||||
if (state.length === 0 && state.ended)
|
||||
endReadable(this);
|
||||
else
|
||||
emitReadable(this);
|
||||
return null;
|
||||
}
|
||||
|
||||
n = howMuchToRead(n, state);
|
||||
|
||||
// if we've ended, and we're now clear, then finish it up.
|
||||
if (n === 0 && state.ended) {
|
||||
if (state.length === 0)
|
||||
endReadable(this);
|
||||
return null;
|
||||
}
|
||||
|
||||
// All the actual chunk generation logic needs to be
|
||||
// *below* the call to _read. The reason is that in certain
|
||||
// synthetic stream cases, such as passthrough streams, _read
|
||||
// may be a completely synchronous operation which may change
|
||||
// the state of the read buffer, providing enough data when
|
||||
// before there was *not* enough.
|
||||
//
|
||||
// So, the steps are:
|
||||
// 1. Figure out what the state of things will be after we do
|
||||
// a read from the buffer.
|
||||
//
|
||||
// 2. If that resulting state will trigger a _read, then call _read.
|
||||
// Note that this may be asynchronous, or synchronous. Yes, it is
|
||||
// deeply ugly to write APIs this way, but that still doesn't mean
|
||||
// that the Readable class should behave improperly, as streams are
|
||||
// designed to be sync/async agnostic.
|
||||
// Take note if the _read call is sync or async (ie, if the read call
|
||||
// has returned yet), so that we know whether or not it's safe to emit
|
||||
// 'readable' etc.
|
||||
//
|
||||
// 3. Actually pull the requested chunks out of the buffer and return.
|
||||
|
||||
// if we need a readable event, then we need to do some reading.
|
||||
var doRead = state.needReadable;
|
||||
debug('need readable', doRead);
|
||||
|
||||
// if we currently have less than the highWaterMark, then also read some
|
||||
if (state.length === 0 || state.length - n < state.highWaterMark) {
|
||||
doRead = true;
|
||||
debug('length less than watermark', doRead);
|
||||
}
|
||||
|
||||
// however, if we've ended, then there's no point, and if we're already
|
||||
// reading, then it's unnecessary.
|
||||
if (state.ended || state.reading) {
|
||||
doRead = false;
|
||||
debug('reading or ended', doRead);
|
||||
} else if (doRead) {
|
||||
debug('do read');
|
||||
state.reading = true;
|
||||
state.sync = true;
|
||||
// if the length is currently zero, then we *need* a readable event.
|
||||
if (state.length === 0)
|
||||
state.needReadable = true;
|
||||
// call internal read method
|
||||
this._read(state.highWaterMark);
|
||||
state.sync = false;
|
||||
// If _read pushed data synchronously, then `reading` will be false,
|
||||
// and we need to re-evaluate how much data we can return to the user.
|
||||
if (!state.reading)
|
||||
n = howMuchToRead(nOrig, state);
|
||||
}
|
||||
|
||||
var ret;
|
||||
if (n > 0)
|
||||
ret = fromList(n, state);
|
||||
else
|
||||
ret = null;
|
||||
|
||||
if (ret === null) {
|
||||
state.needReadable = true;
|
||||
n = 0;
|
||||
} else {
|
||||
state.length -= n;
|
||||
}
|
||||
|
||||
if (state.length === 0) {
|
||||
// If we have nothing in the buffer, then we want to know
|
||||
// as soon as we *do* get something into the buffer.
|
||||
if (!state.ended)
|
||||
state.needReadable = true;
|
||||
|
||||
// If we tried to read() past the EOF, then emit end on the next tick.
|
||||
if (nOrig !== n && state.ended)
|
||||
endReadable(this);
|
||||
}
|
||||
|
||||
if (ret !== null)
|
||||
this.emit('data', ret);
|
||||
|
||||
return ret;
|
||||
};
|
||||
|
||||
function chunkInvalid(state, chunk) {
|
||||
var er = null;
|
||||
if (!(chunk instanceof Buffer) &&
|
||||
typeof chunk !== 'string' &&
|
||||
chunk !== null &&
|
||||
chunk !== undefined &&
|
||||
!state.objectMode) {
|
||||
er = new TypeError('Invalid non-string/buffer chunk');
|
||||
}
|
||||
return er;
|
||||
}
|
||||
|
||||
|
||||
function onEofChunk(stream, state) {
|
||||
if (state.ended) return;
|
||||
if (state.decoder) {
|
||||
var chunk = state.decoder.end();
|
||||
if (chunk && chunk.length) {
|
||||
state.buffer.push(chunk);
|
||||
state.length += state.objectMode ? 1 : chunk.length;
|
||||
}
|
||||
}
|
||||
state.ended = true;
|
||||
|
||||
// emit 'readable' now to make sure it gets picked up.
|
||||
emitReadable(stream);
|
||||
}
|
||||
|
||||
// Don't emit readable right away in sync mode, because this can trigger
|
||||
// another read() call => stack overflow. This way, it might trigger
|
||||
// a nextTick recursion warning, but that's not so bad.
|
||||
function emitReadable(stream) {
|
||||
var state = stream._readableState;
|
||||
state.needReadable = false;
|
||||
if (!state.emittedReadable) {
|
||||
debug('emitReadable', state.flowing);
|
||||
state.emittedReadable = true;
|
||||
if (state.sync)
|
||||
process.nextTick(emitReadable_, stream);
|
||||
else
|
||||
emitReadable_(stream);
|
||||
}
|
||||
}
|
||||
|
||||
function emitReadable_(stream) {
|
||||
debug('emit readable');
|
||||
stream.emit('readable');
|
||||
flow(stream);
|
||||
}
|
||||
|
||||
|
||||
// at this point, the user has presumably seen the 'readable' event,
|
||||
// and called read() to consume some data. that may have triggered
|
||||
// in turn another _read(n) call, in which case reading = true if
|
||||
// it's in progress.
|
||||
// However, if we're not ended, or reading, and the length < hwm,
|
||||
// then go ahead and try to read some more preemptively.
|
||||
function maybeReadMore(stream, state) {
|
||||
if (!state.readingMore) {
|
||||
state.readingMore = true;
|
||||
process.nextTick(maybeReadMore_, stream, state);
|
||||
}
|
||||
}
|
||||
|
||||
function maybeReadMore_(stream, state) {
|
||||
var len = state.length;
|
||||
while (!state.reading && !state.flowing && !state.ended &&
|
||||
state.length < state.highWaterMark) {
|
||||
debug('maybeReadMore read 0');
|
||||
stream.read(0);
|
||||
if (len === state.length)
|
||||
// didn't get any data, stop spinning.
|
||||
break;
|
||||
else
|
||||
len = state.length;
|
||||
}
|
||||
state.readingMore = false;
|
||||
}
|
||||
|
||||
// abstract method. to be overridden in specific implementation classes.
|
||||
// call cb(er, data) where data is <= n in length.
|
||||
// for virtual (non-string, non-buffer) streams, "length" is somewhat
|
||||
// arbitrary, and perhaps not very meaningful.
|
||||
Readable.prototype._read = function(n) {
|
||||
this.emit('error', new Error('not implemented'));
|
||||
};
|
||||
|
||||
Readable.prototype.pipe = function(dest, pipeOpts) {
|
||||
var src = this;
|
||||
var state = this._readableState;
|
||||
|
||||
switch (state.pipesCount) {
|
||||
case 0:
|
||||
state.pipes = dest;
|
||||
break;
|
||||
case 1:
|
||||
state.pipes = [state.pipes, dest];
|
||||
break;
|
||||
default:
|
||||
state.pipes.push(dest);
|
||||
break;
|
||||
}
|
||||
state.pipesCount += 1;
|
||||
debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
|
||||
|
||||
var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
|
||||
dest !== process.stdout &&
|
||||
dest !== process.stderr;
|
||||
|
||||
var endFn = doEnd ? onend : cleanup;
|
||||
if (state.endEmitted)
|
||||
process.nextTick(endFn);
|
||||
else
|
||||
src.once('end', endFn);
|
||||
|
||||
dest.on('unpipe', onunpipe);
|
||||
function onunpipe(readable) {
|
||||
debug('onunpipe');
|
||||
if (readable === src) {
|
||||
cleanup();
|
||||
}
|
||||
}
|
||||
|
||||
function onend() {
|
||||
debug('onend');
|
||||
dest.end();
|
||||
}
|
||||
|
||||
// when the dest drains, it reduces the awaitDrain counter
|
||||
// on the source. This would be more elegant with a .once()
|
||||
// handler in flow(), but adding and removing repeatedly is
|
||||
// too slow.
|
||||
var ondrain = pipeOnDrain(src);
|
||||
dest.on('drain', ondrain);
|
||||
|
||||
var cleanedUp = false;
|
||||
function cleanup() {
|
||||
debug('cleanup');
|
||||
// cleanup event handlers once the pipe is broken
|
||||
dest.removeListener('close', onclose);
|
||||
dest.removeListener('finish', onfinish);
|
||||
dest.removeListener('drain', ondrain);
|
||||
dest.removeListener('error', onerror);
|
||||
dest.removeListener('unpipe', onunpipe);
|
||||
src.removeListener('end', onend);
|
||||
src.removeListener('end', cleanup);
|
||||
src.removeListener('data', ondata);
|
||||
|
||||
cleanedUp = true;
|
||||
|
||||
// if the reader is waiting for a drain event from this
|
||||
// specific writer, then it would cause it to never start
|
||||
// flowing again.
|
||||
// So, if this is awaiting a drain, then we just call it now.
|
||||
// If we don't know, then assume that we are waiting for one.
|
||||
if (state.awaitDrain &&
|
||||
(!dest._writableState || dest._writableState.needDrain))
|
||||
ondrain();
|
||||
}
|
||||
|
||||
// If the user pushes more data while we're writing to dest then we'll end up
|
||||
// in ondata again. However, we only want to increase awaitDrain once because
|
||||
// dest will only emit one 'drain' event for the multiple writes.
|
||||
// => Introduce a guard on increasing awaitDrain.
|
||||
var increasedAwaitDrain = false;
|
||||
src.on('data', ondata);
|
||||
function ondata(chunk) {
|
||||
debug('ondata');
|
||||
increasedAwaitDrain = false;
|
||||
var ret = dest.write(chunk);
|
||||
if (false === ret && !increasedAwaitDrain) {
|
||||
// If the user unpiped during `dest.write()`, it is possible
|
||||
// to get stuck in a permanently paused state if that write
|
||||
// also returned false.
|
||||
// => Check whether `dest` is still a piping destination.
|
||||
if (((state.pipesCount === 1 && state.pipes === dest) ||
|
||||
(state.pipesCount > 1 && state.pipes.indexOf(dest) !== -1)) &&
|
||||
!cleanedUp) {
|
||||
debug('false write response, pause', src._readableState.awaitDrain);
|
||||
src._readableState.awaitDrain++;
|
||||
increasedAwaitDrain = true;
|
||||
}
|
||||
src.pause();
|
||||
}
|
||||
}
|
||||
|
||||
// if the dest has an error, then stop piping into it.
|
||||
// however, don't suppress the throwing behavior for this.
|
||||
function onerror(er) {
|
||||
debug('onerror', er);
|
||||
unpipe();
|
||||
dest.removeListener('error', onerror);
|
||||
if (EE.listenerCount(dest, 'error') === 0)
|
||||
dest.emit('error', er);
|
||||
}
|
||||
|
||||
// Make sure our error handler is attached before userland ones.
|
||||
prependListener(dest, 'error', onerror);
|
||||
|
||||
// Both close and finish should trigger unpipe, but only once.
|
||||
function onclose() {
|
||||
dest.removeListener('finish', onfinish);
|
||||
unpipe();
|
||||
}
|
||||
dest.once('close', onclose);
|
||||
function onfinish() {
|
||||
debug('onfinish');
|
||||
dest.removeListener('close', onclose);
|
||||
unpipe();
|
||||
}
|
||||
dest.once('finish', onfinish);
|
||||
|
||||
function unpipe() {
|
||||
debug('unpipe');
|
||||
src.unpipe(dest);
|
||||
}
|
||||
|
||||
// tell the dest that it's being piped to
|
||||
dest.emit('pipe', src);
|
||||
|
||||
// start the flow if it hasn't been started already.
|
||||
if (!state.flowing) {
|
||||
debug('pipe resume');
|
||||
src.resume();
|
||||
}
|
||||
|
||||
return dest;
|
||||
};
|
||||
|
||||
function pipeOnDrain(src) {
|
||||
return function() {
|
||||
var state = src._readableState;
|
||||
debug('pipeOnDrain', state.awaitDrain);
|
||||
if (state.awaitDrain)
|
||||
state.awaitDrain--;
|
||||
if (state.awaitDrain === 0 && EE.listenerCount(src, 'data')) {
|
||||
state.flowing = true;
|
||||
flow(src);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
Readable.prototype.unpipe = function(dest) {
|
||||
var state = this._readableState;
|
||||
|
||||
// if we're not piping anywhere, then do nothing.
|
||||
if (state.pipesCount === 0)
|
||||
return this;
|
||||
|
||||
// just one destination. most common case.
|
||||
if (state.pipesCount === 1) {
|
||||
// passed in one, but it's not the right one.
|
||||
if (dest && dest !== state.pipes)
|
||||
return this;
|
||||
|
||||
if (!dest)
|
||||
dest = state.pipes;
|
||||
|
||||
// got a match.
|
||||
state.pipes = null;
|
||||
state.pipesCount = 0;
|
||||
state.flowing = false;
|
||||
if (dest)
|
||||
dest.emit('unpipe', this);
|
||||
return this;
|
||||
}
|
||||
|
||||
// slow case. multiple pipe destinations.
|
||||
|
||||
if (!dest) {
|
||||
// remove all.
|
||||
var dests = state.pipes;
|
||||
var len = state.pipesCount;
|
||||
state.pipes = null;
|
||||
state.pipesCount = 0;
|
||||
state.flowing = false;
|
||||
|
||||
for (let i = 0; i < len; i++)
|
||||
dests[i].emit('unpipe', this);
|
||||
return this;
|
||||
}
|
||||
|
||||
// try to find the right one.
|
||||
const i = state.pipes.indexOf(dest);
|
||||
if (i === -1)
|
||||
return this;
|
||||
|
||||
state.pipes.splice(i, 1);
|
||||
state.pipesCount -= 1;
|
||||
if (state.pipesCount === 1)
|
||||
state.pipes = state.pipes[0];
|
||||
|
||||
dest.emit('unpipe', this);
|
||||
|
||||
return this;
|
||||
};
|
||||
|
||||
// set up data events if they are asked for
|
||||
// Ensure readable listeners eventually get something
|
||||
Readable.prototype.on = function(ev, fn) {
|
||||
const res = Stream.prototype.on.call(this, ev, fn);
|
||||
|
||||
if (ev === 'data') {
|
||||
// Start flowing on next tick if stream isn't explicitly paused
|
||||
if (this._readableState.flowing !== false)
|
||||
this.resume();
|
||||
} else if (ev === 'readable') {
|
||||
const state = this._readableState;
|
||||
if (!state.endEmitted && !state.readableListening) {
|
||||
state.readableListening = state.needReadable = true;
|
||||
state.emittedReadable = false;
|
||||
if (!state.reading) {
|
||||
process.nextTick(nReadingNextTick, this);
|
||||
} else if (state.length) {
|
||||
emitReadable(this, state);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
Readable.prototype.addListener = Readable.prototype.on;
|
||||
|
||||
function nReadingNextTick(self) {
|
||||
debug('readable nexttick read 0');
|
||||
self.read(0);
|
||||
}
|
||||
|
||||
// pause() and resume() are remnants of the legacy readable stream API
|
||||
// If the user uses them, then switch into old mode.
|
||||
Readable.prototype.resume = function() {
|
||||
var state = this._readableState;
|
||||
if (!state.flowing) {
|
||||
debug('resume');
|
||||
state.flowing = true;
|
||||
resume(this, state);
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
function resume(stream, state) {
|
||||
if (!state.resumeScheduled) {
|
||||
state.resumeScheduled = true;
|
||||
process.nextTick(resume_, stream, state);
|
||||
}
|
||||
}
|
||||
|
||||
function resume_(stream, state) {
|
||||
if (!state.reading) {
|
||||
debug('resume read 0');
|
||||
stream.read(0);
|
||||
}
|
||||
|
||||
state.resumeScheduled = false;
|
||||
state.awaitDrain = 0;
|
||||
stream.emit('resume');
|
||||
flow(stream);
|
||||
if (state.flowing && !state.reading)
|
||||
stream.read(0);
|
||||
}
|
||||
|
||||
Readable.prototype.pause = function() {
|
||||
debug('call pause flowing=%j', this._readableState.flowing);
|
||||
if (false !== this._readableState.flowing) {
|
||||
debug('pause');
|
||||
this._readableState.flowing = false;
|
||||
this.emit('pause');
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
function flow(stream) {
|
||||
const state = stream._readableState;
|
||||
debug('flow', state.flowing);
|
||||
while (state.flowing && stream.read() !== null);
|
||||
}
|
||||
|
||||
// wrap an old-style stream as the async data source.
|
||||
// This is *not* part of the readable stream interface.
|
||||
// It is an ugly unfortunate mess of history.
|
||||
Readable.prototype.wrap = function(stream) {
|
||||
var state = this._readableState;
|
||||
var paused = false;
|
||||
|
||||
var self = this;
|
||||
stream.on('end', function() {
|
||||
debug('wrapped end');
|
||||
if (state.decoder && !state.ended) {
|
||||
var chunk = state.decoder.end();
|
||||
if (chunk && chunk.length)
|
||||
self.push(chunk);
|
||||
}
|
||||
|
||||
self.push(null);
|
||||
});
|
||||
|
||||
stream.on('data', function(chunk) {
|
||||
debug('wrapped data');
|
||||
if (state.decoder)
|
||||
chunk = state.decoder.write(chunk);
|
||||
|
||||
// don't skip over falsy values in objectMode
|
||||
if (state.objectMode && (chunk === null || chunk === undefined))
|
||||
return;
|
||||
else if (!state.objectMode && (!chunk || !chunk.length))
|
||||
return;
|
||||
|
||||
var ret = self.push(chunk);
|
||||
if (!ret) {
|
||||
paused = true;
|
||||
stream.pause();
|
||||
}
|
||||
});
|
||||
|
||||
// proxy all the other methods.
|
||||
// important when wrapping filters and duplexes.
|
||||
for (var i in stream) {
|
||||
if (this[i] === undefined && typeof stream[i] === 'function') {
|
||||
this[i] = function(method) {
|
||||
return function() {
|
||||
return stream[method].apply(stream, arguments);
|
||||
};
|
||||
}(i);
|
||||
}
|
||||
}
|
||||
|
||||
// proxy certain important events.
|
||||
const events = ['error', 'close', 'destroy', 'pause', 'resume'];
|
||||
events.forEach(function(ev) {
|
||||
stream.on(ev, self.emit.bind(self, ev));
|
||||
});
|
||||
|
||||
// when we try to consume some more bytes, simply unpause the
|
||||
// underlying stream.
|
||||
self._read = function(n) {
|
||||
debug('wrapped _read', n);
|
||||
if (paused) {
|
||||
paused = false;
|
||||
stream.resume();
|
||||
}
|
||||
};
|
||||
|
||||
return self;
|
||||
};
|
||||
|
||||
|
||||
// exposed for testing purposes only.
|
||||
Readable._fromList = fromList;
|
||||
|
||||
// Pluck off n bytes from an array of buffers.
|
||||
// Length is the combined lengths of all the buffers in the list.
|
||||
// This function is designed to be inlinable, so please take care when making
|
||||
// changes to the function body.
|
||||
function fromList(n, state) {
|
||||
// nothing buffered
|
||||
if (state.length === 0)
|
||||
return null;
|
||||
|
||||
var ret;
|
||||
if (state.objectMode)
|
||||
ret = state.buffer.shift();
|
||||
else if (!n || n >= state.length) {
|
||||
// read it all, truncate the list
|
||||
if (state.decoder)
|
||||
ret = state.buffer.join('');
|
||||
else if (state.buffer.length === 1)
|
||||
ret = state.buffer.head.data;
|
||||
else
|
||||
ret = state.buffer.concat(state.length);
|
||||
state.buffer.clear();
|
||||
} else {
|
||||
// read part of list
|
||||
ret = fromListPartial(n, state.buffer, state.decoder);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
// Extracts only enough buffered data to satisfy the amount requested.
|
||||
// This function is designed to be inlinable, so please take care when making
|
||||
// changes to the function body.
|
||||
function fromListPartial(n, list, hasStrings) {
|
||||
var ret;
|
||||
if (n < list.head.data.length) {
|
||||
// slice is the same for buffers and strings
|
||||
ret = list.head.data.slice(0, n);
|
||||
list.head.data = list.head.data.slice(n);
|
||||
} else if (n === list.head.data.length) {
|
||||
// first chunk is a perfect match
|
||||
ret = list.shift();
|
||||
} else {
|
||||
// result spans more than one buffer
|
||||
ret = (hasStrings
|
||||
? copyFromBufferString(n, list)
|
||||
: copyFromBuffer(n, list));
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
// Copies a specified amount of characters from the list of buffered data
|
||||
// chunks.
|
||||
// This function is designed to be inlinable, so please take care when making
|
||||
// changes to the function body.
|
||||
function copyFromBufferString(n, list) {
|
||||
var p = list.head;
|
||||
var c = 1;
|
||||
var ret = p.data;
|
||||
n -= ret.length;
|
||||
while (p = p.next) {
|
||||
const str = p.data;
|
||||
const nb = (n > str.length ? str.length : n);
|
||||
if (nb === str.length)
|
||||
ret += str;
|
||||
else
|
||||
ret += str.slice(0, n);
|
||||
n -= nb;
|
||||
if (n === 0) {
|
||||
if (nb === str.length) {
|
||||
++c;
|
||||
if (p.next)
|
||||
list.head = p.next;
|
||||
else
|
||||
list.head = list.tail = null;
|
||||
} else {
|
||||
list.head = p;
|
||||
p.data = str.slice(nb);
|
||||
}
|
||||
break;
|
||||
}
|
||||
++c;
|
||||
}
|
||||
list.length -= c;
|
||||
return ret;
|
||||
}
|
||||
|
||||
// Copies a specified amount of bytes from the list of buffered data chunks.
|
||||
// This function is designed to be inlinable, so please take care when making
|
||||
// changes to the function body.
|
||||
function copyFromBuffer(n, list) {
|
||||
const ret = Buffer.allocUnsafe(n);
|
||||
var p = list.head;
|
||||
var c = 1;
|
||||
p.data.copy(ret);
|
||||
n -= p.data.length;
|
||||
while (p = p.next) {
|
||||
const buf = p.data;
|
||||
const nb = (n > buf.length ? buf.length : n);
|
||||
buf.copy(ret, ret.length - n, 0, nb);
|
||||
n -= nb;
|
||||
if (n === 0) {
|
||||
if (nb === buf.length) {
|
||||
++c;
|
||||
if (p.next)
|
||||
list.head = p.next;
|
||||
else
|
||||
list.head = list.tail = null;
|
||||
} else {
|
||||
list.head = p;
|
||||
p.data = buf.slice(nb);
|
||||
}
|
||||
break;
|
||||
}
|
||||
++c;
|
||||
}
|
||||
list.length -= c;
|
||||
return ret;
|
||||
}
|
||||
|
||||
function endReadable(stream) {
|
||||
var state = stream._readableState;
|
||||
|
||||
// If we get here before consuming all the bytes, then that is a
|
||||
// bug in node. Should never happen.
|
||||
if (state.length > 0)
|
||||
throw new Error('"endReadable()" called on non-empty stream');
|
||||
|
||||
if (!state.endEmitted) {
|
||||
state.ended = true;
|
||||
process.nextTick(endReadableNT, state, stream);
|
||||
}
|
||||
}
|
||||
|
||||
function endReadableNT(state, stream) {
|
||||
// Check that we didn't get one last unshift.
|
||||
if (!state.endEmitted && state.length === 0) {
|
||||
state.endEmitted = true;
|
||||
stream.readable = false;
|
||||
stream.emit('end');
|
||||
}
|
||||
}
|
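// --- Usage sketch (illustrative only, not part of the vendored file) ---
// A Readable produces data by pushing from _read(); consumers either attach a
// 'data' listener (flowing mode) or call read() after 'readable' (paused
// mode). Assumes require() resolves the module name and that a console
// binding is available.
const ReadableSketch = require('_stream_readable');

const counter = new ReadableSketch({
  read(n) {                                 // called whenever the buffer drains
    this._n = (this._n || 0) + 1;
    this.push(this._n <= 3 ? String(this._n) : null);  // null signals EOF
  }
});

counter.on('data', (chunk) => console.log('chunk:', chunk.toString()));
counter.on('end', () => console.log('done'));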
195
contrib/mORMot/SyNode/core_modules/node_modules/_stream_transform.js
generated
vendored
Normal file
@@ -0,0 +1,195 @@
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.

'use strict';

module.exports = Transform;

const Duplex = require('_stream_duplex');
const util = require('util');
util.inherits(Transform, Duplex);


function TransformState(stream) {
  this.afterTransform = function(er, data) {
    return afterTransform(stream, er, data);
  };

  this.needTransform = false;
  this.transforming = false;
  this.writecb = null;
  this.writechunk = null;
  this.writeencoding = null;
}

function afterTransform(stream, er, data) {
  var ts = stream._transformState;
  ts.transforming = false;

  var cb = ts.writecb;

  if (!cb)
    return stream.emit('error', new Error('no writecb in Transform class'));

  ts.writechunk = null;
  ts.writecb = null;

  if (data !== null && data !== undefined)
    stream.push(data);

  cb(er);

  var rs = stream._readableState;
  rs.reading = false;
  if (rs.needReadable || rs.length < rs.highWaterMark) {
    stream._read(rs.highWaterMark);
  }
}


function Transform(options) {
  if (!(this instanceof Transform))
    return new Transform(options);

  Duplex.call(this, options);

  this._transformState = new TransformState(this);

  // when the writable side finishes, then flush out anything remaining.
  var stream = this;

  // start out asking for a readable event once data is transformed.
  this._readableState.needReadable = true;

  // we have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.
  this._readableState.sync = false;

  if (options) {
    if (typeof options.transform === 'function')
      this._transform = options.transform;

    if (typeof options.flush === 'function')
      this._flush = options.flush;
  }

  this.once('prefinish', function() {
    if (typeof this._flush === 'function')
      this._flush(function(er, data) {
        done(stream, er, data);
      });
    else
      done(stream);
  });
}

Transform.prototype.push = function(chunk, encoding) {
  this._transformState.needTransform = false;
  return Duplex.prototype.push.call(this, chunk, encoding);
};

// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function(chunk, encoding, cb) {
  throw new Error('Not implemented');
};

Transform.prototype._write = function(chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;
  if (!ts.transforming) {
    var rs = this._readableState;
    if (ts.needTransform ||
        rs.needReadable ||
        rs.length < rs.highWaterMark)
      this._read(rs.highWaterMark);
  }
};

// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function(n) {
  var ts = this._transformState;

  if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
    ts.transforming = true;
    this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
  } else {
    // mark that we need a transform, so that any data that comes in
    // will get processed, now that we've asked for it.
    ts.needTransform = true;
  }
};


function done(stream, er, data) {
  if (er)
    return stream.emit('error', er);

  if (data !== null && data !== undefined)
    stream.push(data);

  // if there's nothing in the write buffer, then that means
  // that nothing more will ever be provided
  var ws = stream._writableState;
  var ts = stream._transformState;

  if (ws.length)
    throw new Error('Calling transform done when ws.length != 0');

  if (ts.transforming)
    throw new Error('Calling transform done when still transforming');

  return stream.push(null);
}
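// --- Usage sketch (illustrative only, not part of the vendored file) ---
// As described in the comment at the top of this file, every written chunk is
// handed to _transform() and whatever it pushes (or passes to cb) comes out of
// the readable side. Assumes require() resolves the module name and that a
// console binding is available.
const TransformSketch = require('_stream_transform');

const upperCase = new TransformSketch({
  transform(chunk, encoding, cb) {
    cb(null, chunk.toString().toUpperCase());  // equivalent to push(...) then cb()
  }
});

upperCase.on('data', (chunk) => console.log(chunk.toString()));
upperCase.write('backpressure is decided by the reading side');
upperCase.end();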
530
contrib/mORMot/SyNode/core_modules/node_modules/_stream_writable.js
generated
vendored
Normal file
@@ -0,0 +1,530 @@
|
||||
// A bit simpler than readable streams.
|
||||
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
|
||||
// the drain event emission and buffering.
|
||||
|
||||
'use strict';
|
||||
|
||||
module.exports = Writable;
|
||||
Writable.WritableState = WritableState;
|
||||
|
||||
const util = require('util');
|
||||
const internalUtil = require('internal/util');
|
||||
const Stream = require('stream');
|
||||
const Buffer = require('buffer').Buffer;
|
||||
|
||||
util.inherits(Writable, Stream);
|
||||
|
||||
function nop() {}
|
||||
|
||||
function WriteReq(chunk, encoding, cb) {
|
||||
this.chunk = chunk;
|
||||
this.encoding = encoding;
|
||||
this.callback = cb;
|
||||
this.next = null;
|
||||
}
|
||||
|
||||
function WritableState(options, stream) {
|
||||
options = options || {};
|
||||
|
||||
// object stream flag to indicate whether or not this stream
|
||||
// contains buffers or objects.
|
||||
this.objectMode = !!options.objectMode;
|
||||
|
||||
if (stream instanceof Stream.Duplex)
|
||||
this.objectMode = this.objectMode || !!options.writableObjectMode;
|
||||
|
||||
// the point at which write() starts returning false
|
||||
// Note: 0 is a valid value, means that we always return false if
|
||||
// the entire buffer is not flushed immediately on write()
|
||||
var hwm = options.highWaterMark;
|
||||
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
|
||||
this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
|
||||
|
||||
// cast to ints.
|
||||
this.highWaterMark = ~~this.highWaterMark;
|
||||
|
||||
this.needDrain = false;
|
||||
// at the start of calling end()
|
||||
this.ending = false;
|
||||
// when end() has been called, and returned
|
||||
this.ended = false;
|
||||
// when 'finish' is emitted
|
||||
this.finished = false;
|
||||
|
||||
// should we decode strings into buffers before passing to _write?
|
||||
// this is here so that some node-core streams can optimize string
|
||||
// handling at a lower level.
|
||||
var noDecode = options.decodeStrings === false;
|
||||
this.decodeStrings = !noDecode;
|
||||
|
||||
// Crypto is kind of old and crusty. Historically, its default string
|
||||
// encoding is 'binary' so we have to make this configurable.
|
||||
// Everything else in the universe uses 'utf8', though.
|
||||
this.defaultEncoding = options.defaultEncoding || 'utf8';
|
||||
|
||||
// not an actual buffer we keep track of, but a measurement
|
||||
// of how much we're waiting to get pushed to some underlying
|
||||
// socket or file.
|
||||
this.length = 0;
|
||||
|
||||
// a flag to see when we're in the middle of a write.
|
||||
this.writing = false;
|
||||
|
||||
// when true all writes will be buffered until .uncork() call
|
||||
this.corked = 0;
|
||||
|
||||
// a flag to be able to tell if the onwrite cb is called immediately,
|
||||
// or on a later tick. We set this to true at first, because any
|
||||
// actions that shouldn't happen until "later" should generally also
|
||||
// not happen before the first write call.
|
||||
this.sync = true;
|
||||
|
||||
// a flag to know if we're processing previously buffered items, which
|
||||
// may call the _write() callback in the same tick, so that we don't
|
||||
// end up in an overlapped onwrite situation.
|
||||
this.bufferProcessing = false;
|
||||
|
||||
// the callback that's passed to _write(chunk,cb)
|
||||
this.onwrite = function(er) {
|
||||
onwrite(stream, er);
|
||||
};
|
||||
|
||||
// the callback that the user supplies to write(chunk,encoding,cb)
|
||||
this.writecb = null;
|
||||
|
||||
// the amount that is being written when _write is called.
|
||||
this.writelen = 0;
|
||||
|
||||
this.bufferedRequest = null;
|
||||
this.lastBufferedRequest = null;
|
||||
|
||||
// number of pending user-supplied write callbacks
|
||||
// this must be 0 before 'finish' can be emitted
|
||||
this.pendingcb = 0;
|
||||
|
||||
// emit prefinish if the only thing we're waiting for is _write cbs
|
||||
// This is relevant for synchronous Transform streams
|
||||
this.prefinished = false;
|
||||
|
||||
// True if the error was already emitted and should not be thrown again
|
||||
this.errorEmitted = false;
|
||||
|
||||
// count buffered requests
|
||||
this.bufferedRequestCount = 0;
|
||||
|
||||
// allocate the first CorkedRequest, there is always
|
||||
// one allocated and free to use, and we maintain at most two
|
||||
this.corkedRequestsFree = new CorkedRequest(this);
|
||||
}
|
||||
|
||||
WritableState.prototype.getBuffer = function writableStateGetBuffer() {
|
||||
var current = this.bufferedRequest;
|
||||
var out = [];
|
||||
while (current) {
|
||||
out.push(current);
|
||||
current = current.next;
|
||||
}
|
||||
return out;
|
||||
};
|
||||
|
||||
Object.defineProperty(WritableState.prototype, 'buffer', {
|
||||
get: internalUtil.deprecate(function() {
|
||||
return this.getBuffer();
|
||||
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' +
|
||||
'instead.')
|
||||
});
|
||||
|
||||
function Writable(options) {
|
||||
// Writable ctor is applied to Duplexes, though they're not
|
||||
// instanceof Writable, they're instanceof Readable.
|
||||
if (!(this instanceof Writable) && !(this instanceof Stream.Duplex))
|
||||
return new Writable(options);
|
||||
|
||||
this._writableState = new WritableState(options, this);
|
||||
|
||||
// legacy.
|
||||
this.writable = true;
|
||||
|
||||
if (options) {
|
||||
if (typeof options.write === 'function')
|
||||
this._write = options.write;
|
||||
|
||||
if (typeof options.writev === 'function')
|
||||
this._writev = options.writev;
|
||||
}
|
||||
|
||||
Stream.call(this);
|
||||
}
|
||||
|
||||
// Otherwise people can pipe Writable streams, which is just wrong.
|
||||
Writable.prototype.pipe = function() {
|
||||
this.emit('error', new Error('Cannot pipe, not readable'));
|
||||
};
|
||||
|
||||
|
||||
function writeAfterEnd(stream, cb) {
|
||||
var er = new Error('write after end');
|
||||
// TODO: defer error events consistently everywhere, not just the cb
|
||||
stream.emit('error', er);
|
||||
process.nextTick(cb, er);
|
||||
}
|
||||
|
||||
// If we get something that is not a buffer, string, null, or undefined,
|
||||
// and we're not in objectMode, then that's an error.
|
||||
// Otherwise stream chunks are all considered to be of length=1, and the
|
||||
// watermarks determine how many objects to keep in the buffer, rather than
|
||||
// how many bytes or characters.
|
||||
function validChunk(stream, state, chunk, cb) {
|
||||
var valid = true;
|
||||
var er = false;
|
||||
// Always throw error if a null is written
|
||||
// if we are not in object mode then throw
|
||||
// if it is not a buffer, string, or undefined.
|
||||
if (chunk === null) {
|
||||
er = new TypeError('May not write null values to stream');
|
||||
} else if (!(chunk instanceof Buffer) &&
|
||||
typeof chunk !== 'string' &&
|
||||
chunk !== undefined &&
|
||||
!state.objectMode) {
|
||||
er = new TypeError('Invalid non-string/buffer chunk');
|
||||
}
|
||||
if (er) {
|
||||
stream.emit('error', er);
|
||||
process.nextTick(cb, er);
|
||||
valid = false;
|
||||
}
|
||||
return valid;
|
||||
}
|
||||
|
||||
Writable.prototype.write = function(chunk, encoding, cb) {
|
||||
var state = this._writableState;
|
||||
var ret = false;
|
||||
|
||||
if (typeof encoding === 'function') {
|
||||
cb = encoding;
|
||||
encoding = null;
|
||||
}
|
||||
|
||||
if (chunk instanceof Buffer)
|
||||
encoding = 'buffer';
|
||||
else if (!encoding)
|
||||
encoding = state.defaultEncoding;
|
||||
|
||||
if (typeof cb !== 'function')
|
||||
cb = nop;
|
||||
|
||||
if (state.ended)
|
||||
writeAfterEnd(this, cb);
|
||||
else if (validChunk(this, state, chunk, cb)) {
|
||||
state.pendingcb++;
|
||||
ret = writeOrBuffer(this, state, chunk, encoding, cb);
|
||||
}
|
||||
|
||||
return ret;
|
||||
};
|
||||
|
||||
Writable.prototype.cork = function() {
|
||||
var state = this._writableState;
|
||||
|
||||
state.corked++;
|
||||
};
|
||||
|
||||
Writable.prototype.uncork = function() {
|
||||
var state = this._writableState;
|
||||
|
||||
if (state.corked) {
|
||||
state.corked--;
|
||||
|
||||
if (!state.writing &&
|
||||
!state.corked &&
|
||||
!state.finished &&
|
||||
!state.bufferProcessing &&
|
||||
state.bufferedRequest)
|
||||
clearBuffer(this, state);
|
||||
}
|
||||
};
|
||||
|
||||
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
|
||||
// node::ParseEncoding() requires lower case.
|
||||
if (typeof encoding === 'string')
|
||||
encoding = encoding.toLowerCase();
|
||||
if (!Buffer.isEncoding(encoding))
|
||||
throw new TypeError('Unknown encoding: ' + encoding);
|
||||
this._writableState.defaultEncoding = encoding;
|
||||
return this;
|
||||
};
|
||||
|
||||
function decodeChunk(state, chunk, encoding) {
|
||||
if (!state.objectMode &&
|
||||
state.decodeStrings !== false &&
|
||||
typeof chunk === 'string') {
|
||||
chunk = Buffer.from(chunk, encoding);
|
||||
}
|
||||
return chunk;
|
||||
}
|
||||
|
||||
// if we're already writing something, then just put this
|
||||
// in the queue, and wait our turn. Otherwise, call _write
|
||||
// If we return false, then we need a drain event, so set that flag.
|
||||
function writeOrBuffer(stream, state, chunk, encoding, cb) {
|
||||
chunk = decodeChunk(state, chunk, encoding);
|
||||
|
||||
if (chunk instanceof Buffer)
|
||||
encoding = 'buffer';
|
||||
var len = state.objectMode ? 1 : chunk.length;
|
||||
|
||||
state.length += len;
|
||||
|
||||
var ret = state.length < state.highWaterMark;
|
||||
// we must ensure that previous needDrain will not be reset to false.
|
||||
if (!ret)
|
||||
state.needDrain = true;
|
||||
|
||||
if (state.writing || state.corked) {
|
||||
var last = state.lastBufferedRequest;
|
||||
state.lastBufferedRequest = new WriteReq(chunk, encoding, cb);
|
||||
if (last) {
|
||||
last.next = state.lastBufferedRequest;
|
||||
} else {
|
||||
state.bufferedRequest = state.lastBufferedRequest;
|
||||
}
|
||||
state.bufferedRequestCount += 1;
|
||||
} else {
|
||||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
|
||||
state.writelen = len;
|
||||
state.writecb = cb;
|
||||
state.writing = true;
|
||||
state.sync = true;
|
||||
if (writev)
|
||||
stream._writev(chunk, state.onwrite);
|
||||
else
|
||||
stream._write(chunk, encoding, state.onwrite);
|
||||
state.sync = false;
|
||||
}
|
||||
|
||||
function onwriteError(stream, state, sync, er, cb) {
|
||||
--state.pendingcb;
|
||||
if (sync)
|
||||
process.nextTick(cb, er);
|
||||
else
|
||||
cb(er);
|
||||
|
||||
stream._writableState.errorEmitted = true;
|
||||
stream.emit('error', er);
|
||||
}
|
||||
|
||||
function onwriteStateUpdate(state) {
|
||||
state.writing = false;
|
||||
state.writecb = null;
|
||||
state.length -= state.writelen;
|
||||
state.writelen = 0;
|
||||
}
|
||||
|
||||
function onwrite(stream, er) {
|
||||
var state = stream._writableState;
|
||||
var sync = state.sync;
|
||||
var cb = state.writecb;
|
||||
|
||||
onwriteStateUpdate(state);
|
||||
|
||||
if (er)
|
||||
onwriteError(stream, state, sync, er, cb);
|
||||
else {
|
||||
// Check if we're actually ready to finish, but don't emit yet
|
||||
var finished = needFinish(state);
|
||||
|
||||
if (!finished &&
|
||||
!state.corked &&
|
||||
!state.bufferProcessing &&
|
||||
state.bufferedRequest) {
|
||||
clearBuffer(stream, state);
|
||||
}
|
||||
|
||||
if (sync) {
|
||||
process.nextTick(afterWrite, stream, state, finished, cb);
|
||||
} else {
|
||||
afterWrite(stream, state, finished, cb);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function afterWrite(stream, state, finished, cb) {
|
||||
if (!finished)
|
||||
onwriteDrain(stream, state);
|
||||
state.pendingcb--;
|
||||
cb();
|
||||
finishMaybe(stream, state);
|
||||
}
|
||||
|
||||
// Must force callback to be called on nextTick, so that we don't
|
||||
// emit 'drain' before the write() consumer gets the 'false' return
|
||||
// value, and has a chance to attach a 'drain' listener.
|
||||
function onwriteDrain(stream, state) {
|
||||
if (state.length === 0 && state.needDrain) {
|
||||
state.needDrain = false;
|
||||
stream.emit('drain');
|
||||
}
|
||||
}
|
||||
|
||||
// if there's something in the buffer waiting, then process it
|
||||
function clearBuffer(stream, state) {
|
||||
state.bufferProcessing = true;
|
||||
var entry = state.bufferedRequest;
|
||||
|
||||
if (stream._writev && entry && entry.next) {
|
||||
// Fast case, write everything using _writev()
|
||||
var l = state.bufferedRequestCount;
|
||||
var buffer = new Array(l);
|
||||
var holder = state.corkedRequestsFree;
|
||||
holder.entry = entry;
|
||||
|
||||
var count = 0;
|
||||
while (entry) {
|
||||
buffer[count] = entry;
|
||||
entry = entry.next;
|
||||
count += 1;
|
||||
}
|
||||
|
||||
doWrite(stream, state, true, state.length, buffer, '', holder.finish);
|
||||
|
||||
// doWrite is almost always async, defer these to save a bit of time
|
||||
// as the hot path ends with doWrite
|
||||
state.pendingcb++;
|
||||
state.lastBufferedRequest = null;
|
||||
if (holder.next) {
|
||||
state.corkedRequestsFree = holder.next;
|
||||
holder.next = null;
|
||||
} else {
|
||||
state.corkedRequestsFree = new CorkedRequest(state);
|
||||
}
|
||||
} else {
|
||||
// Slow case, write chunks one-by-one
|
||||
while (entry) {
|
||||
var chunk = entry.chunk;
|
||||
var encoding = entry.encoding;
|
||||
var cb = entry.callback;
|
||||
var len = state.objectMode ? 1 : chunk.length;
|
||||
|
||||
doWrite(stream, state, false, len, chunk, encoding, cb);
|
||||
entry = entry.next;
|
||||
// if we didn't call the onwrite immediately, then
|
||||
// it means that we need to wait until it does.
|
||||
// also, that means that the chunk and cb are currently
|
||||
// being processed, so move the buffer counter past them.
|
||||
if (state.writing) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (entry === null)
|
||||
state.lastBufferedRequest = null;
|
||||
}
|
||||
|
||||
state.bufferedRequestCount = 0;
|
||||
state.bufferedRequest = entry;
|
||||
state.bufferProcessing = false;
|
||||
}
|
||||
|
||||
Writable.prototype._write = function(chunk, encoding, cb) {
|
||||
cb(new Error('_write() method is not implemented'));
|
||||
};
|
||||
|
||||
Writable.prototype._writev = null;
|
||||
|
||||
Writable.prototype.end = function(chunk, encoding, cb) {
|
||||
var state = this._writableState;
|
||||
|
||||
if (typeof chunk === 'function') {
|
||||
cb = chunk;
|
||||
chunk = null;
|
||||
encoding = null;
|
||||
} else if (typeof encoding === 'function') {
|
||||
cb = encoding;
|
||||
encoding = null;
|
||||
}
|
||||
|
||||
if (chunk !== null && chunk !== undefined)
|
||||
this.write(chunk, encoding);
|
||||
|
||||
// .end() fully uncorks
|
||||
if (state.corked) {
|
||||
state.corked = 1;
|
||||
this.uncork();
|
||||
}
|
||||
|
||||
// ignore unnecessary end() calls.
|
||||
if (!state.ending && !state.finished)
|
||||
endWritable(this, state, cb);
|
||||
};
|
||||
|
||||
|
||||
function needFinish(state) {
|
||||
return (state.ending &&
|
||||
state.length === 0 &&
|
||||
state.bufferedRequest === null &&
|
||||
!state.finished &&
|
||||
!state.writing);
|
||||
}
|
||||
|
||||
function prefinish(stream, state) {
|
||||
if (!state.prefinished) {
|
||||
state.prefinished = true;
|
||||
stream.emit('prefinish');
|
||||
}
|
||||
}
|
||||
|
||||
function finishMaybe(stream, state) {
|
||||
var need = needFinish(state);
|
||||
if (need) {
|
||||
if (state.pendingcb === 0) {
|
||||
prefinish(stream, state);
|
||||
state.finished = true;
|
||||
stream.emit('finish');
|
||||
} else {
|
||||
prefinish(stream, state);
|
||||
}
|
||||
}
|
||||
return need;
|
||||
}
|
||||
|
||||
function endWritable(stream, state, cb) {
|
||||
state.ending = true;
|
||||
finishMaybe(stream, state);
|
||||
if (cb) {
|
||||
if (state.finished)
|
||||
process.nextTick(cb);
|
||||
else
|
||||
stream.once('finish', cb);
|
||||
}
|
||||
state.ended = true;
|
||||
stream.writable = false;
|
||||
}
|
||||
|
||||
// It seems a linked list but it is not
|
||||
// there will be only 2 of these for each stream
|
||||
function CorkedRequest(state) {
|
||||
this.next = null;
|
||||
this.entry = null;
|
||||
|
||||
this.finish = (err) => {
|
||||
var entry = this.entry;
|
||||
this.entry = null;
|
||||
while (entry) {
|
||||
var cb = entry.callback;
|
||||
state.pendingcb--;
|
||||
cb(err);
|
||||
entry = entry.next;
|
||||
}
|
||||
if (state.corkedRequestsFree) {
|
||||
state.corkedRequestsFree.next = this;
|
||||
} else {
|
||||
state.corkedRequestsFree = this;
|
||||
}
|
||||
};
|
||||
}
|
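/*
 * Illustrative sketch (not part of the module): how the corked/buffered path above
 * is typically exercised. It assumes require('stream') resolves to this Writable
 * implementation and that it accepts a `writev` option, so the fast case in
 * clearBuffer() (a single doWrite over the whole buffered list) runs on uncork().
 *
 *   var Writable = require('stream').Writable;
 *   var batches = [];
 *   var w = new Writable({
 *     writev: function (chunks, cb) { batches.push(chunks.length); cb(); },
 *     write: function (chunk, enc, cb) { cb(); }
 *   });
 *   w.cork();      // start buffering
 *   w.write('a');
 *   w.write('b');
 *   w.write('c');
 *   w.uncork();    // clearBuffer() hands all 3 chunks to _writev at once
 *   w.end();       // end() fully uncorks and finishes the stream
 */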
481
contrib/mORMot/SyNode/core_modules/node_modules/assert.js
generated
vendored
Normal file
@@ -0,0 +1,481 @@
|
||||
// http://wiki.commonjs.org/wiki/Unit_Testing/1.0
|
||||
//
|
||||
// THIS IS NOT TESTED NOR LIKELY TO WORK OUTSIDE V8!
|
||||
//
|
||||
// Originally from narwhal.js (http://narwhaljs.org)
|
||||
// Copyright (c) 2009 Thomas Robinson <280north.com>
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the 'Software'), to
|
||||
// deal in the Software without restriction, including without limitation the
|
||||
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
// sell copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in
|
||||
// all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Assertions
|
||||
* @module assert
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
|
||||
// UTILITY
|
||||
const compare = process.binding('buffer').compare;
|
||||
const util = require('util');
|
||||
const Buffer = require('buffer').Buffer;
|
||||
const pToString = (obj) => Object.prototype.toString.call(obj);
|
||||
|
||||
// 1. The assert module provides functions that throw
|
||||
// AssertionError's when particular conditions are not met. The
|
||||
// assert module must conform to the following interface.
|
||||
|
||||
const assert = module.exports = ok;
|
||||
|
||||
// 2. The AssertionError is defined in assert.
|
||||
// new assert.AssertionError({ message: message,
|
||||
// actual: actual,
|
||||
// expected: expected })
|
||||
|
||||
assert.AssertionError = function AssertionError(options) {
|
||||
this.name = 'AssertionError';
|
||||
this.actual = options.actual;
|
||||
this.expected = options.expected;
|
||||
this.operator = options.operator;
|
||||
|
||||
if (options.message) {
|
||||
this.message = options.message;
|
||||
this.generatedMessage = false;
|
||||
} else {
|
||||
this.message = getMessage(this);
|
||||
this.generatedMessage = true;
|
||||
}
|
||||
var stackStartFunction = options.stackStartFunction || fail;
|
||||
if (Error.captureStackTrace) {
|
||||
// Chrome and NodeJS
|
||||
Error.captureStackTrace(this, stackStartFunction);
|
||||
} else {
|
||||
// FF, IE 10+ and Safari 6+. Fallback for others
|
||||
let tmp_stack = (new Error).stack.split("\n").slice(1),
|
||||
re = /^(.*?)@(.*?):(.*?)$/.exec(tmp_stack[1]); //[undef, undef, this.fileName, this.lineNumber] = re
|
||||
this.fileName = re[2];
|
||||
this.lineNumber = re[3];
|
||||
this.stack = tmp_stack.join("\n");
|
||||
}
|
||||
};
|
||||
|
||||
// assert.AssertionError instanceof Error
|
||||
util.inherits(assert.AssertionError, Error);
|
||||
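/*
 * Illustrative sketch (not part of the module): the AssertionError fields set by
 * the constructor above, as seen by a caller.
 *
 *   var assert = require('assert');
 *   try {
 *     assert.strictEqual(1, 2, 'one is not two');
 *   } catch (e) {
 *     console.log(e instanceof assert.AssertionError); // true
 *     console.log(e.message);                          // 'one is not two'
 *     console.log(e.actual, e.expected, e.operator);   // 1 2 '==='
 *   }
 */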
|
||||
function truncate(s, n) {
|
||||
return s.slice(0, n);
|
||||
}
|
||||
|
||||
function getMessage(self) {
|
||||
return truncate(util.inspect(self.actual), 128) + ' ' +
|
||||
self.operator + ' ' +
|
||||
truncate(util.inspect(self.expected), 128);
|
||||
}
|
||||
|
||||
// At present only the three keys mentioned above are used and
|
||||
// understood by the spec. Implementations or sub modules can pass
|
||||
// other keys to the AssertionError's constructor - they will be
|
||||
// ignored.
|
||||
|
||||
// 3. All of the following functions must throw an AssertionError
|
||||
// when a corresponding condition is not met, with a message that
|
||||
// may be undefined if not provided. All assertion methods provide
|
||||
// both the actual and expected values to the assertion error for
|
||||
// display purposes.
|
||||
|
||||
/**
|
||||
* Throws an exception that displays the values for actual and expected separated by the provided operator.
|
||||
* @param actual
|
||||
* @param expected
|
||||
* @param message
|
||||
* @param operator
|
||||
* @param stackStartFunction
|
||||
*/
|
||||
function fail(actual, expected, message, operator, stackStartFunction) {
|
||||
throw new assert.AssertionError({
|
||||
message: message,
|
||||
actual: actual,
|
||||
expected: expected,
|
||||
operator: operator,
|
||||
stackStartFunction: stackStartFunction
|
||||
});
|
||||
}
|
||||
|
||||
// EXTENSION! allows for well behaved errors defined elsewhere.
|
||||
assert.fail = fail;
|
||||
|
||||
// 4. Pure assertion tests whether a value is truthy, as determined
|
||||
// by !!guard.
|
||||
// assert.ok(guard, message_opt);
|
||||
// This statement is equivalent to assert.equal(true, !!guard,
|
||||
// message_opt);. To test strictly for the value true, use
|
||||
// assert.strictEqual(true, guard, message_opt);.
|
||||
/**
|
||||
* Tests if value is truthy; equivalent to assert.equal(true, !!value, message);
|
||||
* @param value
|
||||
* @param message
|
||||
*/
|
||||
function ok(value, message) {
|
||||
if (!value) fail(value, true, message, '==', assert.ok);
|
||||
}
|
||||
assert.ok = ok;
|
||||
|
||||
// 5. The equality assertion tests shallow, coercive equality with
|
||||
// ==.
|
||||
// assert.equal(actual, expected, message_opt);
|
||||
|
||||
/**
|
||||
* Tests shallow, coercive equality with the equal comparison operator ( == ).
|
||||
* @param actual
|
||||
* @param expected
|
||||
* @param {String} [message]
|
||||
*/
|
||||
module.exports.equal = function equal(actual, expected, message) {
|
||||
if (actual != expected) fail(actual, expected, message, '==', assert.equal);
|
||||
};
|
||||
|
||||
// 6. The non-equality assertion tests for whether two objects are not equal
|
||||
// with != assert.notEqual(actual, expected, message_opt);
|
||||
|
||||
/**
|
||||
* Tests shallow, coercive non-equality with the not equal comparison operator ( != ).
|
||||
* @param actual
|
||||
* @param expected
|
||||
* @param [message]
|
||||
*/
|
||||
module.exports.notEqual = function notEqual(actual, expected, message) {
|
||||
if (actual == expected) {
|
||||
fail(actual, expected, message, '!=', assert.notEqual);
|
||||
}
|
||||
};
|
||||
|
||||
// 7. The equivalence assertion tests a deep equality relation.
|
||||
// assert.deepEqual(actual, expected, message_opt);
|
||||
|
||||
/**
|
||||
* Tests for deep equality.
|
||||
* @param actual
|
||||
* @param expected
|
||||
* @param [message]
|
||||
*/
|
||||
module.exports.deepEqual = function deepEqual(actual, expected, message) {
|
||||
if (!_deepEqual(actual, expected, false)) {
|
||||
fail(actual, expected, message, 'deepEqual', assert.deepEqual);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Generally identical to assert.deepEqual() with two exceptions.
|
||||
* First, primitive values are compared using the strict equality operator ( === ).
|
||||
* Second, object comparisons include a strict equality check of their prototypes.
|
||||
* @param actual
|
||||
* @param expected
|
||||
* @param [message]
|
||||
*/
|
||||
assert.deepStrictEqual = function deepStrictEqual(actual, expected, message) {
|
||||
if (!_deepEqual(actual, expected, true)) {
|
||||
fail(actual, expected, message, 'deepStrictEqual', assert.deepStrictEqual);
|
||||
}
|
||||
};
|
||||
|
||||
function _deepEqual(actual, expected, strict, memos) {
|
||||
// 7.1. All identical values are equivalent, as determined by ===.
|
||||
if (actual === expected) {
|
||||
return true;
|
||||
} else if (actual instanceof Buffer && expected instanceof Buffer) {
|
||||
return compare(actual, expected) === 0;
|
||||
// UB SPECIFIC
|
||||
} else if (actual instanceof ArrayBuffer && expected instanceof ArrayBuffer) {
|
||||
if (actual.byteLength != expected.byteLength) return false;
|
||||
var aBuf = new Uint8Array(actual), eBuf = new Uint8Array(expected);
|
||||
|
||||
for (var i = 0; i < aBuf.length; i++) {
|
||||
if (aBuf[i] !== eBuf[i]) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
// 7.2. If the expected value is a Date object, the actual value is
|
||||
// equivalent if it is also a Date object that refers to the same time.
|
||||
} else if (util.isDate(actual) && util.isDate(expected)) {
|
||||
return actual.getTime() === expected.getTime();
|
||||
|
||||
// 7.3 If the expected value is a RegExp object, the actual value is
|
||||
// equivalent if it is also a RegExp object with the same source and
|
||||
// properties (`global`, `multiline`, `lastIndex`, `ignoreCase`).
|
||||
} else if (util.isRegExp(actual) && util.isRegExp(expected)) {
|
||||
return actual.source === expected.source &&
|
||||
actual.global === expected.global &&
|
||||
actual.multiline === expected.multiline &&
|
||||
actual.lastIndex === expected.lastIndex &&
|
||||
actual.ignoreCase === expected.ignoreCase;
|
||||
|
||||
// 7.4. Other pairs that do not both pass typeof value == 'object',
|
||||
// equivalence is determined by ==.
|
||||
} else if ((actual === null || typeof actual !== 'object') &&
|
||||
(expected === null || typeof expected !== 'object')) {
|
||||
return strict ? actual === expected : actual == expected;
|
||||
|
||||
// If both values are instances of typed arrays, wrap their underlying
|
||||
// ArrayBuffers in a Buffer each to increase performance
|
||||
// This optimization requires the arrays to have the same type as checked by
|
||||
// Object.prototype.toString (aka pToString). Never perform binary
|
||||
// comparisons for Float*Arrays, though, since e.g. +0 === -0 but their
|
||||
// bit patterns are not identical.
|
||||
} else if (ArrayBuffer.isView(actual) && ArrayBuffer.isView(expected) &&
|
||||
pToString(actual) === pToString(expected) &&
|
||||
!(actual instanceof Float32Array ||
|
||||
actual instanceof Float64Array)) {
|
||||
return compare(Buffer.from(actual.buffer,
|
||||
actual.byteOffset,
|
||||
actual.byteLength),
|
||||
Buffer.from(expected.buffer,
|
||||
expected.byteOffset,
|
||||
expected.byteLength)) === 0;
|
||||
|
||||
// 7.5 For all other Object pairs, including Array objects, equivalence is
|
||||
// determined by having the same number of owned properties (as verified
|
||||
// with Object.prototype.hasOwnProperty.call), the same set of keys
|
||||
// (although not necessarily the same order), equivalent values for every
|
||||
// corresponding key, and an identical 'prototype' property. Note: this
|
||||
// accounts for both named and indexed properties on Arrays.
|
||||
} else {
|
||||
memos = memos || {actual: [], expected: []};
|
||||
|
||||
const actualIndex = memos.actual.indexOf(actual);
|
||||
if (actualIndex !== -1) {
|
||||
if (actualIndex === memos.expected.indexOf(expected)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
memos.actual.push(actual);
|
||||
memos.expected.push(expected);
|
||||
|
||||
return objEquiv(actual, expected, strict, memos);
|
||||
}
|
||||
}
|
||||
|
||||
function isArguments(object) {
|
||||
return Object.prototype.toString.call(object) == '[object Arguments]';
|
||||
}
|
||||
|
||||
function objEquiv(a, b, strict, actualVisitedObjects) {
|
||||
if (a === null || a === undefined || b === null || b === undefined)
|
||||
return false;
|
||||
// if one is a primitive, the other must be same
|
||||
if (util.isPrimitive(a) || util.isPrimitive(b))
|
||||
return a === b;
|
||||
if (strict && Object.getPrototypeOf(a) !== Object.getPrototypeOf(b))
|
||||
return false;
|
||||
const aIsArgs = isArguments(a);
|
||||
const bIsArgs = isArguments(b);
|
||||
if ((aIsArgs && !bIsArgs) || (!aIsArgs && bIsArgs))
|
||||
return false;
|
||||
const ka = Object.keys(a);
|
||||
const kb = Object.keys(b);
|
||||
var key, i;
|
||||
// having the same number of owned properties (keys incorporates
|
||||
// hasOwnProperty)
|
||||
if (ka.length !== kb.length)
|
||||
return false;
|
||||
//the same set of keys (although not necessarily the same order),
|
||||
ka.sort();
|
||||
kb.sort();
|
||||
//~~~cheap key test
|
||||
for (i = ka.length - 1; i >= 0; i--) {
|
||||
if (ka[i] !== kb[i])
|
||||
return false;
|
||||
}
|
||||
//equivalent values for every corresponding key, and
|
||||
//~~~possibly expensive deep test
|
||||
for (i = ka.length - 1; i >= 0; i--) {
|
||||
key = ka[i];
|
||||
if (!_deepEqual(a[key], b[key], strict, actualVisitedObjects))
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
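/*
 * Illustrative sketch (not part of the module): how the rules in _deepEqual() and
 * objEquiv() above differ between deepEqual and deepStrictEqual.
 *
 *   var assert = require('assert');
 *   assert.deepEqual({a: 1}, {a: '1'});          // ok: rule 7.4, primitives compared with ==
 *   assert.deepEqual(new Date(0), new Date(0));  // ok: rule 7.2, same timestamp
 *   assert.throws(function () {
 *     assert.deepStrictEqual({a: 1}, {a: '1'});  // fails: strict mode compares values with ===
 *   }, assert.AssertionError);
 */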
|
||||
// 8. The non-equivalence assertion tests for any deep inequality.
|
||||
// assert.notDeepEqual(actual, expected, message_opt);
|
||||
|
||||
/**
|
||||
* Tests for any deep inequality.
|
||||
* @param actual
|
||||
* @param expected
|
||||
* @param [message]
|
||||
*/
|
||||
module.exports.notDeepEqual = function notDeepEqual(actual, expected, message) {
|
||||
if (_deepEqual(actual, expected, false)) {
|
||||
fail(actual, expected, message, 'notDeepEqual', assert.notDeepEqual);
|
||||
}
|
||||
};
|
||||
|
||||
assert.notDeepStrictEqual = notDeepStrictEqual;
|
||||
function notDeepStrictEqual(actual, expected, message) {
|
||||
if (_deepEqual(actual, expected, true)) {
|
||||
fail(actual, expected, message, 'notDeepStrictEqual', notDeepStrictEqual);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// 9. The strict equality assertion tests strict equality, as determined by ===.
|
||||
// assert.strictEqual(actual, expected, message_opt);
|
||||
|
||||
assert.strictEqual = function strictEqual(actual, expected, message) {
|
||||
if (actual !== expected) {
|
||||
fail(actual, expected, message, '===', assert.strictEqual);
|
||||
}
|
||||
};
|
||||
|
||||
// 10. The strict non-equality assertion tests for strict inequality, as
|
||||
// determined by !==. assert.notStrictEqual(actual, expected, message_opt);
|
||||
|
||||
assert.notStrictEqual = function notStrictEqual(actual, expected, message) {
|
||||
if (actual === expected) {
|
||||
fail(actual, expected, message, '!==', assert.notStrictEqual);
|
||||
}
|
||||
};
|
||||
|
||||
function expectedException(actual, expected) {
|
||||
if (!actual || !expected) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (Object.prototype.toString.call(expected) == '[object RegExp]') {
|
||||
return expected.test(actual);
|
||||
}
|
||||
|
||||
try {
|
||||
if (actual instanceof expected) {
|
||||
return true;
|
||||
}
|
||||
} catch (e) {
|
||||
// Ignore. The instanceof check doesn't work for arrow functions.
|
||||
}
|
||||
|
||||
if (Error.isPrototypeOf(expected)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return expected.call({}, actual) === true;
|
||||
}
|
||||
|
||||
function _tryBlock(block) {
|
||||
var error;
|
||||
try {
|
||||
block();
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
return error;
|
||||
}
|
||||
|
||||
function _throws(shouldThrow, block, expected, message) {
|
||||
var actual;
|
||||
|
||||
if (typeof block !== 'function') {
|
||||
throw new TypeError('"block" argument must be a function');
|
||||
}
|
||||
|
||||
if (typeof expected === 'string') {
|
||||
message = expected;
|
||||
expected = null;
|
||||
}
|
||||
|
||||
actual = _tryBlock(block);
|
||||
|
||||
message = (expected && expected.name ? ' (' + expected.name + ').' : '.') +
|
||||
(message ? ' ' + message : '.');
|
||||
|
||||
if (shouldThrow && !actual) {
|
||||
fail(actual, expected, 'Missing expected exception' + message);
|
||||
}
|
||||
|
||||
const userProvidedMessage = typeof message === 'string';
|
||||
const isUnwantedException = !shouldThrow && util.isError(actual);
|
||||
const isUnexpectedException = !shouldThrow && actual && !expected;
|
||||
|
||||
if ((isUnwantedException &&
|
||||
userProvidedMessage &&
|
||||
expectedException(actual, expected)) ||
|
||||
isUnexpectedException) {
|
||||
fail(actual, expected, 'Got unwanted exception' + message);
|
||||
}
|
||||
|
||||
if ((shouldThrow && actual && expected &&
|
||||
!expectedException(actual, expected)) || (!shouldThrow && actual)) {
|
||||
throw actual;
|
||||
}
|
||||
}
|
||||
|
||||
// 11. Expected to throw an error:
|
||||
// assert.throws(block, Error_opt, message_opt);
|
||||
/**
|
||||
* Expects block to throw an error. error can be constructor, RegExp or validation function.
|
||||
*
|
||||
* Validate instanceof using constructor:
|
||||
*
|
||||
* assert.throws(function() {
|
||||
* throw new Error("Wrong value");
|
||||
* }, Error);
|
||||
*
|
||||
* Validate error message using RegExp:
|
||||
*
|
||||
* assert.throws(function() {
|
||||
* throw new Error("Wrong value");
|
||||
* }, /value/);
|
||||
*
|
||||
* Custom error validation:
|
||||
*
|
||||
* assert.throws(
|
||||
* function() {
|
||||
* throw new Error("Wrong value");
|
||||
* },
|
||||
* function(err) {
|
||||
* if ( (err instanceof Error) && /value/.test(err) ) {
|
||||
* return true;
|
||||
* }
|
||||
* },
|
||||
* "unexpected error"
|
||||
* );
|
||||
*
|
||||
* @param block
|
||||
* @param [error]
|
||||
* @param [message]
|
||||
*/
|
||||
module.exports.throws = function(block, /*optional*/error, /*optional*/message) {
|
||||
_throws(true, block, error, message);
|
||||
};
|
||||
|
||||
// EXTENSION! This is annoying to write outside this module.
|
||||
/**
|
||||
* Expects block not to throw an error, see assert.throws for details.
|
||||
* @param block
|
||||
* @param [error]
|
||||
* @param [message]
|
||||
*/
|
||||
module.exports.doesNotThrow = function(block, /*optional*/error, /*optional*/message) {
|
||||
_throws(false, block, error, message);
|
||||
};
|
||||
|
||||
/**
|
||||
* Tests that value is not falsy; throws if it is truthy. Useful when testing the first argument, error, in callbacks.
|
||||
* @param err
|
||||
*/
|
||||
module.exports.ifError = function(err) { if (err) {throw err;}};
|
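/*
 * Illustrative sketch (not part of the module): assert.ifError targets the
 * node-style callback convention where the first argument is an error.
 * The file path below is hypothetical.
 *
 *   var assert = require('assert');
 *   var fs = require('fs');
 *   fs.readFile('c:\\temp\\config.json', 'utf-8', function (err, data) {
 *     assert.ifError(err);           // rethrows err if it is truthy
 *     assert.ok(data.length > 0);
 *   });
 */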
1388
contrib/mORMot/SyNode/core_modules/node_modules/buffer.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
7
contrib/mORMot/SyNode/core_modules/node_modules/child_process.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
/*
|
||||
* Fake implementation of nodejs child_process
|
||||
* Throw on spawn
|
||||
*/
|
||||
module.exports.spawn = function(){
|
||||
throw new Error('Not implemented in SyNode');
|
||||
}
|
182
contrib/mORMot/SyNode/core_modules/node_modules/console.js
generated
vendored
Normal file
@@ -0,0 +1,182 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
// Modified by UnityBase core team to be compatible with SyNode
|
||||
|
||||
var util = require('util');
|
||||
|
||||
/**
|
||||
* Console & log output functions
|
||||
* Writes to the log with a log level that depends on the method used. In case of a GUI server, echoes to the GUI log (if enabled).
* In case of a command-line server, echoes to `stdout`.
|
||||
*
|
||||
* Do not create this class directly - use global {@link console} already created by UB.
|
||||
*
|
||||
* console.log('%s is a %s usually with weight less then %dgr', 'apple', 'fruit', 100);
|
||||
* //Will output "apple is a fruit usually with weight less then 100gr"
|
||||
* console.log('apple', 'fruit', 100);
|
||||
* //will output "apple fruit 100"
|
||||
* console.debug('something');
|
||||
* // will output to log only in "Debug" build (UBD.exe)
|
||||
*
|
||||
* Arguments, passed to console output functions are transformed to string using {@link util.format} call.
|
||||
*
|
||||
* @module console
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
|
||||
/**
|
||||
* Do not create directly, use {@link console} instance from `global`.
|
||||
*
|
||||
* console.debug('Yeh!');
|
||||
*
|
||||
* @class Console
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
function Console(stdout, stderr) {
|
||||
if (!(this instanceof Console)) {
|
||||
return new Console(stdout, stderr);
|
||||
}
|
||||
if (!stdout || typeof stdout.write !== 'function') {
|
||||
throw new TypeError('Console expects a writable stream instance');
|
||||
}
|
||||
if (!stderr) {
|
||||
stderr = stdout;
|
||||
}
|
||||
var prop = {
|
||||
writable: true,
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
};
|
||||
prop.value = stdout;
|
||||
Object.defineProperty(this, '_stdout', prop);
|
||||
prop.value = stderr;
|
||||
Object.defineProperty(this, '_stderr', prop);
|
||||
prop.value = {};
|
||||
Object.defineProperty(this, '_times', prop);
|
||||
|
||||
// bind the prototype functions to this Console instance
|
||||
Object.keys(Console.prototype).forEach(function(k) {
|
||||
this[k] = this[k].bind(this);
|
||||
}, this);
|
||||
}
|
||||
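/*
 * Illustrative sketch (not part of the module): creating a Console bound to custom
 * streams, as the constructor above allows. The stream object is a hypothetical
 * stand-in; any object with a write(string) method works.
 *
 *   var Console = require('console').Console;
 *   var lines = [];
 *   var fakeOut = { write: function (s) { lines.push(s); } };
 *   var myConsole = new Console(fakeOut);       // stderr defaults to stdout
 *   myConsole.log('%s costs %d', 'apple', 10);  // lines[0] === 'apple costs 10\n'
 */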
|
||||
/**
|
||||
* Output to log with log level `Info`. Internally use util.format for create output, so
|
||||
* format chars can be used:
|
||||
*
|
||||
* - %s - String.
|
||||
* - %d - Number (both integer and float).
|
||||
* - %j - JSON.
|
||||
* - %% - single percent sign ('%'). This does not consume an argument.
|
||||
*
|
||||
* console.log('%s is a %s usually with weight less then %dgr', 'apple', 'fruit', 100);
|
||||
* //Will output "apple is a fruit usually with weight less then 100gr"
|
||||
*
|
||||
* console.log('apple', 'fruit', 100);
|
||||
* //will output "apple fruit 100"
|
||||
*
|
||||
* console.log('the object JSON is %j', {a: 12, b: {inner: 11}});
|
||||
* // will output a JSON object instead of [object Object]
|
||||
*
|
||||
* @param {...*}
|
||||
*/
|
||||
Console.prototype.log = function() {
|
||||
this._stdout.write(util.format.apply(this, arguments) + '\n');
|
||||
};
|
||||
|
||||
/**
|
||||
* Output to log with log level `Debug`. In case {@link process.isDebug} is false - do nothing
|
||||
* @method
|
||||
* @param {...*}
|
||||
*/
|
||||
Console.prototype.debug = process.isDebug ?
|
||||
function() {
|
||||
this._stdout.write(util.format.apply(this, arguments) + '\n', 2); //UB specific
|
||||
} :
|
||||
function() {
|
||||
};
|
||||
|
||||
/**
|
||||
* Output to log with log level `Info` (alias for console.log)
|
||||
* @method
|
||||
* @param {...*}
|
||||
*/
|
||||
Console.prototype.info = Console.prototype.log;
|
||||
|
||||
|
||||
/**
|
||||
* Output to log with log level `Warning`. In case of OS console echo output to stderr
|
||||
* @param {...*}
|
||||
*/
|
||||
Console.prototype.warn = function() {
|
||||
this._stderr.write(util.format.apply(this, arguments) + '\n', 4); //UB specific
|
||||
};
|
||||
|
||||
/**
|
||||
* Output to log with log level `Error`. In case of OS console echo output to stderr
|
||||
* @param {...*}
|
||||
*/
|
||||
Console.prototype.error = function() {
|
||||
this._stderr.write(util.format.apply(this, arguments) + '\n', 5); //UB specific
|
||||
};
|
||||
|
||||
/**
|
||||
* Uses util.inspect on obj and prints resulting string to stdout.
|
||||
* @param {Object} object
|
||||
*/
|
||||
Console.prototype.dir = function(object) {
|
||||
this._stdout.write(util.inspect(object) + '\n');
|
||||
};
|
||||
|
||||
/**
|
||||
* Mark a time.
|
||||
* @param {String} label
|
||||
*/
|
||||
Console.prototype.time = function(label) {
|
||||
this._times[label] = Date.now();
|
||||
};
|
||||
|
||||
/**
|
||||
* Finish timer, record output
|
||||
* @example
|
||||
*
|
||||
* console.time('100-elements');
|
||||
* for (var i = 0; i < 100; i++) {
|
||||
* ;
|
||||
* }
|
||||
* console.timeEnd('100-elements');
|
||||
*
|
||||
* @param {string} label
|
||||
*/
|
||||
Console.prototype.timeEnd = function(label) {
|
||||
var time = this._times[label];
|
||||
if (!time) {
|
||||
throw new Error('No such label: ' + label);
|
||||
}
|
||||
var duration = Date.now() - time;
|
||||
this.log('%s: %dms', label, duration);
|
||||
};
|
||||
|
||||
|
||||
Console.prototype.trace = function() {
|
||||
// TODO probably can to do this better with V8's debug object once that is
|
||||
// exposed.
|
||||
var err = new Error;
|
||||
err.name = 'Trace';
|
||||
err.message = util.format.apply(this, arguments);
|
||||
//MPV Error.captureStackTrace(err, arguments.callee);
|
||||
this.error(err.stack);
|
||||
};
|
||||
|
||||
/**
|
||||
* Similar to {@link assert#ok}, but the error message is formatted as {@link util#format util.format(message...)}.
|
||||
* @param expression
|
||||
*/
|
||||
Console.prototype.assert = function(expression) {
|
||||
if (!expression) {
|
||||
var arr = Array.prototype.slice.call(arguments, 1);
|
||||
require('assert').ok(false, util.format.apply(this, arr));
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = new Console(process.stdout, process.stderr);
|
||||
module.exports.Console = Console;
|
16
contrib/mORMot/SyNode/core_modules/node_modules/crypto.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
//fake module
|
||||
|
||||
exports.createHash = exports.Hash = Hash;
|
||||
function Hash(algorithm, options) {
|
||||
if (!(this instanceof Hash))
|
||||
return new Hash(algorithm, options);
|
||||
/* this._binding = new binding.Hash(algorithm);
|
||||
LazyTransform.call(this, options);*/
|
||||
this.fake = true;
|
||||
}
|
||||
|
||||
exports.randomBytes = randomBytes;
|
||||
function randomBytes(size, callback) {
|
||||
return 'zzzzz';
|
||||
}
|
||||
|
117
contrib/mORMot/SyNode/core_modules/node_modules/dns.js
generated
vendored
Normal file
@@ -0,0 +1,117 @@
|
||||
// Fake DNS node module interface
|
||||
// For require('dns') compatibility only
|
||||
const NOT_IMPLEMENTED_IN_SYNODE = 'Not implemented in SyNode'
|
||||
|
||||
function lookup() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function lookupService() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function getServers() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function setServers() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function resolve() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function resolve4() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function resolve6() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function resolveCname() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function resolveMx() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function resolveNs() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function resolveTxt() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function resolveSrv() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function resolvePtr() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function resolveNaptr() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function resolveSoa() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
function reverse() {
|
||||
throw new Error(NOT_IMPLEMENTED_IN_SYNODE)
|
||||
}
|
||||
|
||||
|
||||
module.exports = {
|
||||
lookup,
|
||||
lookupService,
|
||||
getServers,
|
||||
setServers,
|
||||
resolve,
|
||||
resolve4,
|
||||
resolve6,
|
||||
resolveCname,
|
||||
resolveMx,
|
||||
resolveNs,
|
||||
resolveTxt,
|
||||
resolveSrv,
|
||||
resolvePtr,
|
||||
resolveNaptr,
|
||||
resolveSoa,
|
||||
reverse,
|
||||
|
||||
// uv_getaddrinfo flags
|
||||
ADDRCONFIG: 'cares.AI_ADDRCONFIG',
|
||||
V4MAPPED: 'cares.AI_V4MAPPED',
|
||||
|
||||
// ERROR CODES
|
||||
NODATA: 'ENODATA',
|
||||
FORMERR: 'EFORMERR',
|
||||
SERVFAIL: 'ESERVFAIL',
|
||||
NOTFOUND: 'ENOTFOUND',
|
||||
NOTIMP: 'ENOTIMP',
|
||||
REFUSED: 'EREFUSED',
|
||||
BADQUERY: 'EBADQUERY',
|
||||
BADNAME: 'EBADNAME',
|
||||
BADFAMILY: 'EBADFAMILY',
|
||||
BADRESP: 'EBADRESP',
|
||||
CONNREFUSED: 'ECONNREFUSED',
|
||||
TIMEOUT: 'ETIMEOUT',
|
||||
EOF: 'EOF',
|
||||
FILE: 'EFILE',
|
||||
NOMEM: 'ENOMEM',
|
||||
DESTRUCTION: 'EDESTRUCTION',
|
||||
BADSTR: 'EBADSTR',
|
||||
BADFLAGS: 'EBADFLAGS',
|
||||
NONAME: 'ENONAME',
|
||||
BADHINTS: 'EBADHINTS',
|
||||
NOTINITIALIZED: 'ENOTINITIALIZED',
|
||||
LOADIPHLPAPI: 'ELOADIPHLPAPI',
|
||||
ADDRGETNETWORKPARAMS: 'EADDRGETNETWORKPARAMS',
|
||||
CANCELLED: 'ECANCELLED'
|
||||
};
|
561
contrib/mORMot/SyNode/core_modules/node_modules/events.js
generated
vendored
Normal file
@@ -0,0 +1,561 @@
|
||||
'use strict';
|
||||
/**
|
||||
* @module events
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
|
||||
/**
|
||||
* NodeJS-like EventEmitter. See also <a href="http://nodejs.org/api/events.html">NodeJS events documentation</a>
|
||||
*
|
||||
* To add event emitting ability to any object:
|
||||
*
|
||||
|
||||
var myObject = {},
|
||||
//compatibility EventEmitter = require('events').EventEmitter;
|
||||
EventEmitter = require('events');
|
||||
// add EventEmitter to myObject
|
||||
EventEmitter.call(myObject);
|
||||
var util = require('util');
|
||||
util._extend(myObject, EventEmitter.prototype);
|
||||
|
||||
* In case the object is created via a constructor function:
|
||||
|
||||
function MyObject() {
|
||||
EventEmitter.call(this);
|
||||
}
|
||||
util.inherits(MyObject, EventEmitter);
|
||||
|
||||
var myObject = new MyObject();
|
||||
|
||||
* Usage:
|
||||
|
||||
myObject.on('myEvent', function(num, str){console.log(num, str) });
|
||||
|
||||
myObject.emit('myEvent', 1, 'two'); // output: 1 two
|
||||
|
||||
*
|
||||
* @class EventEmitter
|
||||
* @mixin
|
||||
*/
|
||||
|
||||
function EventEmitter() {
|
||||
EventEmitter.init.call(this);
|
||||
}
|
||||
module.exports = EventEmitter;
|
||||
|
||||
// Backwards-compat with node 0.10.x
|
||||
EventEmitter.EventEmitter = EventEmitter;
|
||||
|
||||
/*
|
||||
* @deprecated This property is not used (=== false) in UB. Also deprecated in Node
|
||||
*/
|
||||
EventEmitter.usingDomains = false;
|
||||
|
||||
//UB EventEmitter.prototype.domain = undefined;
|
||||
/**
|
||||
* Private collection of events.
|
||||
* @private
|
||||
*/
|
||||
EventEmitter.prototype._events = undefined;
|
||||
/**
|
||||
* Use setMaxListeners/getMaxListeners instead of direct access
|
||||
* @private
|
||||
*/
|
||||
EventEmitter.prototype._maxListeners = undefined;
|
||||
|
||||
// By default EventEmitters will print a warning if more than 10 listeners are
|
||||
// added to it. This is a useful default which helps finding memory leaks.
|
||||
EventEmitter.defaultMaxListeners = 10;
|
||||
|
||||
/**
|
||||
* @private
|
||||
*/
|
||||
EventEmitter.init = function() {
|
||||
//UB this.domain = null;
|
||||
//if (EventEmitter.usingDomains) {
|
||||
// // if there is an active domain, then attach to it.
|
||||
// domain = domain || require('domain');
|
||||
// if (domain.active && !(this instanceof domain.Domain)) {
|
||||
// this.domain = domain.active;
|
||||
// }
|
||||
//}
|
||||
|
||||
if (!this._events || this._events === Object.getPrototypeOf(this)._events) {
|
||||
this._events = {};
|
||||
this._eventsCount = 0;
|
||||
}
|
||||
|
||||
this._maxListeners = this._maxListeners || undefined;
|
||||
};
|
||||
|
||||
/**
|
||||
* Obviously not all Emitters should be limited to 10. This function allows
|
||||
* that to be increased. Set to zero for unlimited.
|
||||
* @param {Number} n
|
||||
*/
|
||||
EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) {
|
||||
if (typeof n !== 'number' || n < 0 || isNaN(n))
|
||||
throw new TypeError('n must be a positive number');
|
||||
this._maxListeners = n;
|
||||
return this;
|
||||
};
|
||||
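/*
 * Illustrative sketch (not part of the module): raising the listener limit so the
 * "possible EventEmitter memory leak" warning checked in addListener() below is not
 * triggered for an emitter that legitimately needs many listeners.
 *
 *   var EventEmitter = require('events');
 *   var bus = new EventEmitter();
 *   bus.setMaxListeners(50);             // default is EventEmitter.defaultMaxListeners (10)
 *   for (var i = 0; i < 20; i++) {
 *     bus.on('tick', function () {});    // no leak warning: 20 <= 50
 *   }
 *   console.log(bus.getMaxListeners());  // 50
 */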
|
||||
function $getMaxListeners(that) {
|
||||
if (that._maxListeners === undefined)
|
||||
return EventEmitter.defaultMaxListeners;
|
||||
return that._maxListeners;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return {Number}
|
||||
*/
|
||||
EventEmitter.prototype.getMaxListeners = function getMaxListeners() {
|
||||
return $getMaxListeners(this);
|
||||
};
|
||||
|
||||
// These standalone emit* functions are used to optimize calling of event
|
||||
// handlers for fast cases because emit() itself often has a variable number of
|
||||
// arguments and can be deoptimized because of that. These functions always have
|
||||
// the same number of arguments and thus do not get deoptimized, so the code
|
||||
// inside them can execute faster.
|
||||
function emitNone(handler, isFn, self) {
|
||||
if (isFn)
|
||||
handler.call(self);
|
||||
else {
|
||||
var len = handler.length;
|
||||
var listeners = arrayClone(handler, len);
|
||||
for (var i = 0; i < len; ++i)
|
||||
listeners[i].call(self);
|
||||
}
|
||||
}
|
||||
function emitOne(handler, isFn, self, arg1) {
|
||||
if (isFn)
|
||||
handler.call(self, arg1);
|
||||
else {
|
||||
var len = handler.length;
|
||||
var listeners = arrayClone(handler, len);
|
||||
for (var i = 0; i < len; ++i)
|
||||
listeners[i].call(self, arg1);
|
||||
}
|
||||
}
|
||||
function emitTwo(handler, isFn, self, arg1, arg2) {
|
||||
if (isFn)
|
||||
handler.call(self, arg1, arg2);
|
||||
else {
|
||||
var len = handler.length;
|
||||
var listeners = arrayClone(handler, len);
|
||||
for (var i = 0; i < len; ++i)
|
||||
listeners[i].call(self, arg1, arg2);
|
||||
}
|
||||
}
|
||||
function emitThree(handler, isFn, self, arg1, arg2, arg3) {
|
||||
if (isFn)
|
||||
handler.call(self, arg1, arg2, arg3);
|
||||
else {
|
||||
var len = handler.length;
|
||||
var listeners = arrayClone(handler, len);
|
||||
for (var i = 0; i < len; ++i)
|
||||
listeners[i].call(self, arg1, arg2, arg3);
|
||||
}
|
||||
}
|
||||
|
||||
function emitMany(handler, isFn, self, args) {
|
||||
if (isFn)
|
||||
handler.apply(self, args);
|
||||
else {
|
||||
var len = handler.length;
|
||||
var listeners = arrayClone(handler, len);
|
||||
for (var i = 0; i < len; ++i)
|
||||
listeners[i].apply(self, args);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute each of the listeners in order with the supplied arguments.
|
||||
* Returns true if event had listeners, false otherwise.
|
||||
*
|
||||
* @param {String} type Event name
|
||||
* @param {...*} eventArgs Arguments, passed to listeners
|
||||
* @return {boolean}
|
||||
*/
|
||||
EventEmitter.prototype.emit = function emit(type) {
|
||||
var er, handler, len, args, i, events/*UB domain*/;
|
||||
//UB var needDomainExit = false;
|
||||
var doError = (type === 'error');
|
||||
|
||||
events = this._events;
|
||||
if (events)
|
||||
doError = (doError && events.error == null);
|
||||
else if (!doError)
|
||||
return false;
|
||||
|
||||
//UB domain = this.domain;
|
||||
|
||||
// If there is no 'error' event listener then throw.
|
||||
if (doError) {
|
||||
er = arguments[1];
|
||||
//UB
|
||||
//if (domain) {
|
||||
// if (!er)
|
||||
// er = new Error('Uncaught, unspecified "error" event.');
|
||||
// er.domainEmitter = this;
|
||||
// er.domain = domain;
|
||||
// er.domainThrown = false;
|
||||
// domain.emit('error', er);
|
||||
//} else
|
||||
if (er instanceof Error) {
|
||||
throw er; // Unhandled 'error' event
|
||||
} else {
|
||||
// At least give some kind of context to the user
|
||||
var err = new Error('Uncaught, unspecified "error" event. (' + er + ')');
|
||||
err.context = er;
|
||||
throw err;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
handler = events[type];
|
||||
|
||||
if (!handler)
|
||||
return false;
|
||||
|
||||
//UB
|
||||
//if (domain && this !== process) {
|
||||
// domain.enter();
|
||||
// needDomainExit = true;
|
||||
//}
|
||||
|
||||
var isFn = typeof handler === 'function';
|
||||
len = arguments.length;
|
||||
switch (len) {
|
||||
// fast cases
|
||||
case 1:
|
||||
emitNone(handler, isFn, this);
|
||||
break;
|
||||
case 2:
|
||||
emitOne(handler, isFn, this, arguments[1]);
|
||||
break;
|
||||
case 3:
|
||||
emitTwo(handler, isFn, this, arguments[1], arguments[2]);
|
||||
break;
|
||||
case 4:
|
||||
emitThree(handler, isFn, this, arguments[1], arguments[2], arguments[3]);
|
||||
break;
|
||||
// slower
|
||||
default:
|
||||
args = new Array(len - 1);
|
||||
for (i = 1; i < len; i++)
|
||||
args[i - 1] = arguments[i];
|
||||
emitMany(handler, isFn, this, args);
|
||||
}
|
||||
|
||||
//UB if (needDomainExit)
|
||||
// domain.exit();
|
||||
|
||||
return true;
|
||||
};
|
||||
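/*
 * Illustrative sketch (not part of the module): the special 'error' handling in
 * emit() above - an 'error' event with no listener is thrown instead of dispatched.
 *
 *   var EventEmitter = require('events');
 *   var emitter = new EventEmitter();
 *   emitter.emit('nothing-registered');           // false: no listeners for this type
 *   try {
 *     emitter.emit('error', new Error('boom'));   // no 'error' listener -> throws
 *   } catch (e) {
 *     console.log(e.message);                     // 'boom'
 *   }
 *   emitter.on('error', function (e) { console.log('handled: ' + e.message); });
 *   emitter.emit('error', new Error('boom'));     // true: dispatched to the listener
 */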
|
||||
/**
|
||||
* Adds a listener to the end of the listeners array for the specified event.
|
||||
* Will emit `newListener` event on success.
|
||||
*
|
||||
* Usage sample:
|
||||
*
|
||||
* Session.on('login', function () {
|
||||
* console.log('someone connected!');
|
||||
* });
|
||||
*
|
||||
* Returns emitter, so calls can be chained.
|
||||
*
|
||||
* @param {String} type Event name
|
||||
* @param {Function} listener
|
||||
* @return {EventEmitter}
|
||||
*/
|
||||
EventEmitter.prototype.addListener = function addListener(type, listener) {
|
||||
var m;
|
||||
var events;
|
||||
var existing;
|
||||
|
||||
if (typeof listener !== 'function')
|
||||
throw new TypeError('listener must be a function');
|
||||
|
||||
events = this._events;
|
||||
if (!events) {
|
||||
events = this._events = {};
|
||||
this._eventsCount = 0;
|
||||
} else {
|
||||
// To avoid recursion in the case that type === "newListener"! Before
|
||||
// adding it to the listeners, first emit "newListener".
|
||||
if (events.newListener) {
|
||||
/** @fires newListener */
|
||||
this.emit('newListener', type,
|
||||
listener.listener ? listener.listener : listener);
|
||||
|
||||
// Re-assign `events` because a newListener handler could have caused the
|
||||
// this._events to be assigned to a new object
|
||||
events = this._events;
|
||||
}
|
||||
existing = events[type];
|
||||
}
|
||||
|
||||
if (!existing) {
|
||||
// Optimize the case of one listener. Don't need the extra array object.
|
||||
existing = events[type] = listener;
|
||||
++this._eventsCount;
|
||||
} else {
|
||||
if (typeof existing === 'function') {
|
||||
// Adding the second element, need to change to array.
|
||||
existing = events[type] = [existing, listener];
|
||||
} else {
|
||||
// If we've already got an array, just append.
|
||||
existing.push(listener);
|
||||
}
|
||||
|
||||
// Check for listener leak
|
||||
if (!existing.warned) {
|
||||
m = $getMaxListeners(this);
|
||||
if (m && m > 0 && existing.length > m) {
|
||||
existing.warned = true;
|
||||
console.error('(node) warning: possible EventEmitter memory ' +
|
||||
'leak detected. %d %s listeners added. ' +
|
||||
'Use emitter.setMaxListeners() to increase limit.',
|
||||
existing.length, type);
|
||||
console.trace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Alias for {@link EventEmitter#addListener addListener}
|
||||
* @method
|
||||
* @param {String} type Event name
|
||||
* @param {Function} listener
|
||||
* @return {EventEmitter}
|
||||
*/
|
||||
EventEmitter.prototype.on = EventEmitter.prototype.addListener;
|
||||
|
||||
/**
|
||||
* Adds a one time listener for the event. This listener is invoked only the next time the event is fired, after which it is removed.
|
||||
* @param {String} type Event name
|
||||
* @param {Function} listener
|
||||
* @return {EventEmitter}
|
||||
*/
|
||||
EventEmitter.prototype.once = function once(type, listener) {
|
||||
if (typeof listener !== 'function')
|
||||
throw new TypeError('listener must be a function');
|
||||
|
||||
var fired = false;
|
||||
|
||||
function g() {
|
||||
this.removeListener(type, g);
|
||||
|
||||
if (!fired) {
|
||||
fired = true;
|
||||
listener.apply(this, arguments);
|
||||
}
|
||||
}
|
||||
|
||||
g.listener = listener;
|
||||
this.on(type, g);
|
||||
|
||||
return this;
|
||||
};
|
||||
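/*
 * Illustrative sketch (not part of the module): once() registers the wrapper g above,
 * which removes itself before invoking the listener, so the listener runs at most once.
 *
 *   var EventEmitter = require('events');
 *   var emitter = new EventEmitter();
 *   var calls = 0;
 *   emitter.once('ping', function (value) { calls += value; });
 *   emitter.emit('ping', 1);   // listener runs, calls === 1
 *   emitter.emit('ping', 1);   // wrapper already removed, calls stays 1
 */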
|
||||
|
||||
/**
|
||||
* Remove a listener from the listener array for the specified event.
|
||||
* Caution: changes array indices in the listener array behind the listener.
|
||||
* Emits a 'removeListener' event if the listener was removed.
|
||||
*
|
||||
* @param {String} type Event name
|
||||
* @param {Function} listener
|
||||
*/
|
||||
EventEmitter.prototype.removeListener =
|
||||
function removeListener(type, listener) {
|
||||
var list, events, position, i;
|
||||
|
||||
if (typeof listener !== 'function')
|
||||
throw new TypeError('listener must be a function');
|
||||
|
||||
events = this._events;
|
||||
if (!events)
|
||||
return this;
|
||||
|
||||
list = events[type];
|
||||
if (!list)
|
||||
return this;
|
||||
|
||||
if (list === listener || (list.listener && list.listener === listener)) {
|
||||
if (--this._eventsCount === 0)
|
||||
this._events = {};
|
||||
else {
|
||||
delete events[type];
|
||||
if (events.removeListener)
|
||||
/** @fires removeListener */
|
||||
this.emit('removeListener', type, listener);
|
||||
}
|
||||
} else if (typeof list !== 'function') {
|
||||
position = -1;
|
||||
|
||||
for (i = list.length; i-- > 0;) {
|
||||
if (list[i] === listener ||
|
||||
(list[i].listener && list[i].listener === listener)) {
|
||||
position = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (position < 0)
|
||||
return this;
|
||||
|
||||
if (list.length === 1) {
|
||||
list[0] = undefined;
|
||||
if (--this._eventsCount === 0) {
|
||||
this._events = {};
|
||||
return this;
|
||||
} else {
|
||||
delete events[type];
|
||||
}
|
||||
} else {
|
||||
spliceOne(list, position);
|
||||
}
|
||||
|
||||
if (events.removeListener)
|
||||
this.emit('removeListener', type, listener);
|
||||
}
|
||||
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Removes all listeners, or those of the specified event.
|
||||
* It's not a good idea to remove listeners that were added elsewhere in the code,
|
||||
* especially when it's on an emitter that you didn't create (e.g. sockets or file streams).
|
||||
*
|
||||
* Returns emitter, so calls can be chained.
|
||||
* @param {String} type Event name
|
||||
* @return {EventEmitter}
|
||||
*/
|
||||
EventEmitter.prototype.removeAllListeners =
|
||||
function removeAllListeners(type) {
|
||||
var listeners, events;
|
||||
|
||||
events = this._events;
|
||||
if (!events)
|
||||
return this;
|
||||
|
||||
// not listening for removeListener, no need to emit
|
||||
if (!events.removeListener) {
|
||||
if (arguments.length === 0) {
|
||||
this._events = {};
|
||||
this._eventsCount = 0;
|
||||
} else if (events[type]) {
|
||||
if (--this._eventsCount === 0)
|
||||
this._events = {};
|
||||
else
|
||||
delete events[type];
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
// emit removeListener for all listeners on all events
|
||||
if (arguments.length === 0) {
|
||||
var keys = Object.keys(events);
|
||||
for (var i = 0, key; i < keys.length; ++i) {
|
||||
key = keys[i];
|
||||
if (key === 'removeListener') continue;
|
||||
this.removeAllListeners(key);
|
||||
}
|
||||
this.removeAllListeners('removeListener');
|
||||
this._events = {};
|
||||
this._eventsCount = 0;
|
||||
return this;
|
||||
}
|
||||
|
||||
listeners = events[type];
|
||||
|
||||
if (typeof listeners === 'function') {
|
||||
this.removeListener(type, listeners);
|
||||
} else if (listeners) {
|
||||
// LIFO order
|
||||
do {
|
||||
this.removeListener(type, listeners[listeners.length - 1]);
|
||||
} while (listeners[0]);
|
||||
}
|
||||
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns an array of listeners for the specified event.
|
||||
* @param {String} type Event name
|
||||
* @return {Array.<Function>}
|
||||
*/
|
||||
EventEmitter.prototype.listeners = function listeners(type) {
|
||||
var evlistener;
|
||||
var ret;
|
||||
var events = this._events;
|
||||
|
||||
if (!events)
|
||||
ret = [];
|
||||
else {
|
||||
evlistener = events[type];
|
||||
if (!evlistener)
|
||||
ret = [];
|
||||
else if (typeof evlistener === 'function')
|
||||
ret = [evlistener];
|
||||
else
|
||||
ret = arrayClone(evlistener, evlistener.length);
|
||||
}
|
||||
|
||||
return ret;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return the number of listeners for a given event.
|
||||
* @param {EventEmitter} emitter
|
||||
* @param {String} type
|
||||
* @return {Number}
|
||||
*/
|
||||
EventEmitter.listenerCount = function(emitter, type) {
|
||||
if (typeof emitter.listenerCount === 'function') {
|
||||
return emitter.listenerCount(type);
|
||||
} else {
|
||||
return listenerCount.call(emitter, type);
|
||||
}
|
||||
};
|
||||
|
||||
EventEmitter.prototype.listenerCount = listenerCount;
|
||||
function listenerCount(type) {
|
||||
const events = this._events;
|
||||
|
||||
if (events) {
|
||||
const evlistener = events[type];
|
||||
|
||||
if (typeof evlistener === 'function') {
|
||||
return 1;
|
||||
} else if (evlistener) {
|
||||
return evlistener.length;
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// About 1.5x faster than the two-arg version of Array#splice().
|
||||
function spliceOne(list, index) {
|
||||
for (var i = index, k = i + 1, n = list.length; k < n; i += 1, k += 1)
|
||||
list[i] = list[k];
|
||||
list.pop();
|
||||
}
|
||||
|
||||
function arrayClone(arr, i) {
|
||||
var copy = new Array(i);
|
||||
while (i--)
|
||||
copy[i] = arr[i];
|
||||
return copy;
|
||||
}
|
348
contrib/mORMot/SyNode/core_modules/node_modules/fs.js
generated
vendored
Normal file
@@ -0,0 +1,348 @@
|
||||
/**
|
||||
* SyNode file-system routines. We try to implement here the same interface as in <a href="http://nodejs.org/api/fs.html">NodeJS fs</a>
|
||||
*
|
||||
* var fs = require('fs');
|
||||
* var content = fs.readFileSync('c:\\a.txt', 'utf-8');
|
||||
*
|
||||
* @module fs
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
|
||||
const constants = process.binding('constants').fs
|
||||
const internalFS = require('internal/fs')
|
||||
const util = require('util')
|
||||
const fs = exports;
|
||||
// loadFile (used by fs.loadFile/readFileSync below) is assumed to be exposed by process.binding('fs')
const {fileStat, directoryExists, fileExists, readDir,
  realpath, rename, loadFile, loadFileToBuffer,
  writeFile, appendFile,
  deleteFile, forceDirectories, removeDir,
} = process.binding('fs')
|
||||
const pathModule = require('path');
|
||||
const {
|
||||
assertEncoding,
|
||||
stringToFlags
|
||||
} = internalFS;
|
||||
|
||||
Object.defineProperty(exports, 'constants', {
|
||||
configurable: false,
|
||||
enumerable: true,
|
||||
value: constants
|
||||
})
|
||||
|
||||
const kMinPoolSpace = 128;
|
||||
const { kMaxLength } = require('buffer')
|
||||
|
||||
const isWindows = process.platform === 'win32'
|
||||
|
||||
function getOptions(options, defaultOptions) {
|
||||
if (options === null || options === undefined ||
|
||||
typeof options === 'function') {
|
||||
return defaultOptions;
|
||||
}
|
||||
|
||||
if (typeof options === 'string') {
|
||||
defaultOptions = util._extend({}, defaultOptions);
|
||||
defaultOptions.encoding = options;
|
||||
options = defaultOptions;
|
||||
} else if (typeof options !== 'object') {
|
||||
throw new TypeError('"options" must be a string or an object, got ' +
|
||||
typeof options + ' instead.');
|
||||
}
|
||||
|
||||
if (options.encoding !== 'buffer')
|
||||
assertEncoding(options.encoding);
|
||||
return options;
|
||||
}
|
||||
|
||||
function nullCheck(path, callback) {
|
||||
if (('' + path).indexOf('\u0000') !== -1) {
|
||||
var er = new Error('Path must be a string without null bytes');
|
||||
er.code = 'ENOENT';
|
||||
// SyNode if (typeof callback !== 'function')
|
||||
throw er;
|
||||
// SyNode process.nextTick(callback, er);
|
||||
// SyNode return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Check whether the specified path is a file (or a symlink to a file)
|
||||
* @param path
|
||||
* @return {Boolean}
|
||||
*/
|
||||
exports.isFile = function isFile(path){
|
||||
return fileExists(path);
|
||||
};
|
||||
|
||||
/**
|
||||
* Check whether the specified path is a folder (or a symlink to a folder)
|
||||
* @param path
|
||||
* @return {Boolean}
|
||||
*/
|
||||
exports.isDir = function isDir(path){
|
||||
return directoryExists(path);
|
||||
};
|
||||
|
||||
const emptyObj = Object.create(null);
|
||||
/**
|
||||
* Synchronous realpath(3). Returns the resolved path (resolve symlinks, junctions on Windows, /../)
|
||||
*/
|
||||
exports.realpathSync = function realpathSync(p, options){
|
||||
if (!options)
|
||||
options = emptyObj;
|
||||
else
|
||||
options = getOptions(options, emptyObj);
|
||||
if (typeof p !== 'string') {
|
||||
// SyNode handleError((p = getPathFromURL(p)));
|
||||
// SyNode if (typeof p !== 'string')
|
||||
p += '';
|
||||
}
|
||||
nullCheck(p);
|
||||
p = pathModule.resolve(p);
|
||||
|
||||
const cache = options[internalFS.realpathCacheKey];
|
||||
const maybeCachedResult = cache && cache.get(p);
|
||||
if (maybeCachedResult) {
|
||||
return maybeCachedResult;
|
||||
}
|
||||
let res = realpath(p);
|
||||
if (cache) cache.set(p, res);
|
||||
return res;
|
||||
};
|
||||
|
||||
/**
|
||||
* Reads the entire contents of a TEXT file.
|
||||
* If a BOM is found - decode the file to a string using the BOM.
* If no BOM is found - use the forceUFT8 parameter.
|
||||
* @param {String} fileName
|
||||
* @param {Boolean} [forceUFT8] If no BOM is found and forceUFT8 is true (default) - the file is expected to be UTF-8, otherwise ASCII
|
||||
* @returns {String}
|
||||
*/
|
||||
exports.loadFile = function (fileName, forceUFT8){
|
||||
return loadFile(fileName, forceUFT8);
|
||||
};
|
||||
|
||||
/**
|
||||
* Reads the entire contents of a file. If options.encoding == 'bin', then the ArrayBuffer is returned.
|
||||
* If no options is specified at all - result is String as in {@link fs.loadFile}
|
||||
* @param {String} fileName Absolute path to file
|
||||
* @param {Object} [options]
|
||||
* @param {String|Null} [options.encoding] Default to null. Possible values: 'bin'|'ascii'|'utf-8'
|
||||
* @returns {String|ArrayBuffer}
|
||||
*/
|
||||
function readFileSync(fileName, options){
|
||||
let stat = fileStat(fileName);
|
||||
if (!stat) {
|
||||
throw new Error('no such file or directory, open \'' + fileName + '\'');
|
||||
}
|
||||
if (!options || (options && (options.encoding !== 'bin'))) {
|
||||
options = getOptions(options, {flag: 'r'});
|
||||
}
|
||||
|
||||
if (options.encoding && ((options.encoding === 'ascii') || (options.encoding === 'utf8') || (options.encoding === 'utf-8'))) {
|
||||
return loadFile(fileName, !(options.encoding === 'ascii'));
|
||||
} else {
|
||||
let buf = loadFileToBuffer(fileName) // UInt8Array
|
||||
if (options.encoding === 'bin') return buf // ub 4.x compatibility mode
|
||||
buf = Buffer.from(buf)
|
||||
if (options.encoding)
|
||||
buf = buf.toString(options.encoding);
|
||||
return buf;
|
||||
}
|
||||
};
|
||||
exports.readFileSync = readFileSync
|
||||
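/*
 * Illustrative sketch (not part of the module): the encoding handling implemented in
 * readFileSync() above. The file paths are hypothetical.
 *
 *   var fs = require('fs');
 *   var text = fs.readFileSync('c:\\temp\\readme.txt', 'utf-8');            // String
 *   var buf  = fs.readFileSync('c:\\temp\\image.png');                      // Buffer (no encoding given)
 *   var raw  = fs.readFileSync('c:\\temp\\image.png', {encoding: 'bin'});   // Uint8Array (UB 4.x compatibility)
 */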
|
||||
function rethrow() {
|
||||
return function(err) {
|
||||
if (err) {
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function maybeCallback(cb) {
|
||||
return typeof cb === 'function' ? cb : rethrow();
|
||||
}
|
||||
|
||||
function makeOneArgFuncAsync(oneArgSyncFunc){
|
||||
return function(arg, cb){
|
||||
var _res;
|
||||
var callback = maybeCallback(cb);
|
||||
try {
|
||||
_res = oneArgSyncFunc(arg);
|
||||
callback(null, _res);
|
||||
} catch(e){
|
||||
callback(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.readFile = function readFile(fileName, options, callback_){
|
||||
var stat = fileStat(fileName);
|
||||
var callback = maybeCallback(arguments[arguments.length - 1]);
|
||||
if (!stat) {
|
||||
callback(new Error('no such file or directory, open \'' + fileName + '\''));
|
||||
} else {
|
||||
callback(null, readFileSync(fileName, options))
|
||||
}
|
||||
};
|
||||
|
||||
//noinspection JSUnusedLocalSymbols
|
||||
/**
|
||||
* Create all missing folders in the given path. Only absolute paths are supported. Throws an error on failure.
|
||||
* @param {String} path path for creation.
|
||||
* @param {Number} [mode] Ignored under Windows
|
||||
*/
|
||||
exports.mkdirSync = function mkdirSync(path, mode){
|
||||
if (!forceDirectories(path)){
|
||||
throw new Error('can\'t create directory ' + path);
|
||||
}
|
||||
};
|
||||
|
||||
/** Read file names from a directory (folder names included).
* Returns an array of file names. Throws an error if the directory does not exist.
|
||||
* @param {String} path
|
||||
* @return {Array.<String>}
|
||||
*/
|
||||
function readdirSync(path){
|
||||
var res = readDir(path, true);
|
||||
if (res == null) {
|
||||
throw new Error('can not read dir ' + path);
|
||||
} else {
|
||||
return res;
|
||||
}
|
||||
};
|
||||
exports.readdirSync = readdirSync;
|
||||
|
||||
exports.readdir = makeOneArgFuncAsync(readdirSync);
|
||||
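/*
 * Illustrative sketch (not part of the module): functions produced by
 * makeOneArgFuncAsync() above keep the node-style callback signature while running
 * synchronously under the hood. The directory path is hypothetical.
 *
 *   var fs = require('fs');
 *   fs.readdir('c:\\temp', function (err, names) {
 *     if (err) throw err;
 *     console.log(names.length + ' entries');  // same array readdirSync would return
 *   });
 */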
|
||||
/**
|
||||
* Get file statistics. Throws if the file or folder does not exist.
|
||||
* @param fileName
|
||||
* @returns {Boolean|{atime: Date, mtime: Date, ctime: Date, size: number, _fileName: string, isDirectory: function}}
|
||||
*/
|
||||
function statSync(fileName){
|
||||
var oStat;
|
||||
|
||||
oStat = fileStat(fileName);
|
||||
if (oStat === null) throw new Error('ENOENT: no such file or directory, stat ' + fileName)
|
||||
oStat._fileName = fileName;
|
||||
oStat.isDirectory = function(){
|
||||
return fs.isDir(this._fileName);
|
||||
};
|
||||
oStat.isFile = function(){
|
||||
return !fs.isDir(this._fileName);
|
||||
};
|
||||
oStat.isSymbolicLink = function(){
|
||||
return false; //TODO - implement
|
||||
};
|
||||
return oStat;
|
||||
};
|
||||
|
||||
exports.statSync = statSync;
|
||||
|
||||
exports.lstatSync = statSync;
|
||||
|
||||
exports.stat = function stat(fileName, callback_){
|
||||
var _stat
|
||||
var callback = maybeCallback(arguments[arguments.length - 1]);
|
||||
try {
|
||||
_stat = statSync(fileName);
|
||||
callback(null, _stat);
|
||||
} catch (e) {
|
||||
callback(e);
|
||||
}
|
||||
};
|
||||
|
||||
// TODO - lstat should be the variant of stat that does not follow symlinks; currently it is an alias for stat
|
||||
exports.lstat = exports.stat;
|
||||
|
||||
/**
|
||||
* Write to file
|
||||
* Actually implements {@link UBWriter#write}
|
||||
* @param {String} fileName Full absolute file path
|
||||
* @param {ArrayBuffer|Object|String} data Data to write. If Object - it stringify before write
|
||||
* @param {Object} [options]
|
||||
* @param {String} [options.encoding] Encode data to `encoding` before write. Default to `utf-8` in case data is String or `bin` in case data is ArrayBuffer.
|
||||
* One of "utf-8"|"ucs2"|"bin"|"base64".
|
||||
*/
|
||||
exports.writeFileSync = function writeFileSync(fileName, data, options){
|
||||
//var res = writeFile(fileName, data);
|
||||
var
|
||||
encoding = options && options.encoding,
|
||||
res;
|
||||
res = encoding ? writeFile(fileName, data, encoding) : writeFile(fileName, data);
|
||||
if(!res)
|
||||
throw new Error('can not write file ' + fileName);
|
||||
else return res;
|
||||
};
|
||||
|
||||
/**
|
||||
* Append data to a file, creating the file if it does not yet exist
|
||||
* Actually implements {@link UBWriter#write}
|
||||
* @param {String} fileName Full absolute file path
|
||||
* @param {ArrayBuffer|Object|String} data Data to write. `Object` are stringified before write
|
||||
* @param {Object} [options]
|
||||
* @param {String} [options.encoding] Encode data to `encoding` before write.
|
||||
* Default to `utf-8` in case data is String or `bin` in case data is ArrayBuffer.
|
||||
* Possible values: "utf-8"|"ucs2"|"bin"|"base64".
|
||||
*/
|
||||
exports.appendFileSync = function appendFileSync(fileName, data, options){
|
||||
var
|
||||
encoding = options && options.encoding,
|
||||
res;
|
||||
res = encoding ? appendFile(fileName, data, encoding) : appendFile(fileName, data);
|
||||
if(!res)
|
||||
throw new Error('can not write file ' + fileName);
|
||||
else return res;
|
||||
};
|
||||
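/*
 * Illustrative sketch (not part of the module): writing and appending with the
 * helpers above. The file path and content are hypothetical.
 *
 *   var fs = require('fs');
 *   fs.writeFileSync('c:\\temp\\log.txt', 'first line\n');                  // String written as utf-8 by default
 *   fs.appendFileSync('c:\\temp\\log.txt', 'second line\n');                // appends, creates the file if missing
 *   fs.writeFileSync('c:\\temp\\data.txt', 'AQID', {encoding: 'base64'});   // encoding forwarded to the writeFile binding
 */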
|
||||
/**
|
||||
* Check whether `path` exists (can be a file, folder or symlink)
|
||||
* @param path
|
||||
* @return {Boolean}
|
||||
*/
|
||||
exports.existsSync = function existsSync(path){
|
||||
return !!fileStat(path);
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete file.
|
||||
*/
|
||||
function unlinkSync(path){
|
||||
try{
|
||||
return deleteFile(path)
|
||||
}catch(e){
|
||||
return false;
|
||||
}
|
||||
};
|
||||
exports.unlinkSync = unlinkSync;
|
||||
|
||||
exports.unlink = makeOneArgFuncAsync(unlinkSync);
|
||||
|
||||
/**
|
||||
* Delete non-empty directory. See {@link removeDir} for details
|
||||
* @param {String} path path to remove
|
||||
*/
|
||||
exports.rmdirSync = function rmdirSync(path){
|
||||
return removeDir(path);
|
||||
};
|
||||
|
||||
/**
|
||||
* Move (rename) file.
|
||||
* @param {String} oldPath
|
||||
* @param {String} newPath
|
||||
*/
|
||||
exports.renameSync = function renameSync(oldPath, newPath){
|
||||
nullCheck(oldPath);
|
||||
nullCheck(newPath);
|
||||
return rename(pathModule._makeLong(oldPath),
|
||||
pathModule._makeLong(newPath));
|
||||
};
|
||||
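// Usage sketch (added for illustration; paths are hypothetical): a typical replace-file
// sequence built from existsSync/unlinkSync/renameSync above.
//   var fs = require('fs');
//   if (fs.existsSync('/tmp/report.old')) fs.unlinkSync('/tmp/report.old');
//   if (fs.existsSync('/tmp/report.txt')) fs.renameSync('/tmp/report.txt', '/tmp/report.old');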
|
||||
/**
|
||||
* Fake class for Node.js compatibility
|
||||
*/
|
||||
exports.ReadStream = ReadStream;
|
||||
function ReadStream(){}
|
499
contrib/mORMot/SyNode/core_modules/node_modules/http.js
generated
vendored
Normal file
499
contrib/mORMot/SyNode/core_modules/node_modules/http.js
generated
vendored
Normal file
@@ -0,0 +1,499 @@
|
||||
/**
|
||||
* HTTP client.
|
||||
* @example
|
||||
*
|
||||
var http = require('http');
|
||||
var request = http.request({
|
||||
//alternative to host/port/path is
|
||||
//URL: 'http://localhost:888/getAppInfo',
|
||||
host: 'localhost', port: '80', path: '/getAppInfo',
|
||||
method: 'POST',
|
||||
sendTimeout: 30000, receiveTimeout: 30000,
|
||||
keepAlive: true,
|
||||
compressionEnable: true
|
||||
});
|
||||
request.write('Add string to response');
|
||||
var fileContent = fs.readFileSync('d:\\binaryFile.txt'); // returns an ArrayBuffer, since no encoding is passed
|
||||
request.write(fileContent, 'base64'); // write file content as base64 encoded string
|
||||
var response = request.end();
|
||||
|
||||
var http = require('http');
|
||||
var assert = require('assert');
|
||||
var DOMParser = require('xmldom').DOMParser;
|
||||
// set global proxy settings if client is behind a proxy
|
||||
// http.setGlobalProxyConfiguration('proxy.main:3249', 'localhost');
|
||||
var resp = http.get('https://synopse.info/fossil/wiki/Synopse+OpenSource');
|
||||
// check we are actually behind a proxy
|
||||
// assert.ok(resp.headers('via').startsWith('1.1 proxy.main'), 'proxy used');
|
||||
var index = resp.read();
|
||||
console.log(index);
|
||||
// var doc = new DOMParser().parseFromString(index);
|
||||
// assert.ok(doc.documentElement.textContent.startsWith('mORMot'), 'got mORMot from mORMot');
|
||||
*
|
||||
* @module http
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
const CRLF = '\r\n'
|
||||
const url = require('url')
|
||||
const EventEmitter = require('events').EventEmitter
|
||||
const util = require('util')
|
||||
const THTTPClient = process.binding('synode_http').THTTPClient
|
||||
|
||||
/* Global http proxy configuration.
|
||||
The default proxy server value is taken from the http_proxy environment variable.
Under Windows (Docker for Windows, for example) HTTP_PROXY is used, so we fall back to it as well
|
||||
*/
|
||||
var
|
||||
proxyConfig = {
|
||||
proxy: process.env.http_proxy || process.env.HTTP_PROXY || '', // must be `proxy`: ClientRequest and setGlobalProxyConfiguration read/write proxyConfig.proxy
|
||||
bypass: ''
|
||||
},
|
||||
connectionDefaults = {
|
||||
useHTTPS: false,
|
||||
useCompression: true,
|
||||
keepAlive: false,
|
||||
connectTimeout: 60000,
|
||||
sendTimeout: 30000,
|
||||
receiveTimeout: 30000
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure a global (on the `http` module level) proxy server in case you can't configure it using
* either **`proxycfg.exe -u`** on Windows XP or **`netsh winhttp import proxy source=ie`** on other Windows versions,
* or by passing the `options.proxyName` parameter.
|
||||
*
|
||||
* Settings are applied only to newly created {ClientRequest} instances.
|
||||
*
|
||||
* See for details <a href="http://msdn.microsoft.com/en-us/library/windows/desktop/aa383996(v=vs.85).aspx">this MS article</a>
|
||||
*
|
||||
* @param {String} proxy name of the proxy server to use in format `[[http|https]://]host[:port]` For example 'http://proxy.my.domain:3249'
|
||||
* @param {String|Array} [bypass] semicolon-delimited list or array of host names or IP addresses, or host masks or both, that should not be routed through the proxy
|
||||
*/
|
||||
exports.setGlobalProxyConfiguration = function setGlobalProxyConfiguration (proxy, bypass) {
|
||||
proxyConfig.proxy = proxy || ''
|
||||
if (Array.isArray(bypass)) {
|
||||
bypass = bypass.join(';')
|
||||
}
|
||||
proxyConfig.bypass = bypass || ''
|
||||
}
|
||||
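// Usage sketch (added for illustration; the proxy name comes from the JSDoc above, the
// bypass list is hypothetical):
//   var http = require('http')
//   http.setGlobalProxyConfiguration('http://proxy.my.domain:3249', ['localhost', '127.0.0.1'])
//   // newly created ClientRequest instances will now be routed through the proxy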
|
||||
/**
|
||||
* Override global (on the `http` module level) connection defaults.
|
||||
*
|
||||
* Settings are applied only to newly created {ClientRequest} instances.
|
||||
*
|
||||
* var http = require('http');
|
||||
* http.setGlobalConnectionDefaults({receiveTimeout: 60000}); // set receive timeout to 60 sec.
|
||||
*
|
||||
* @param {Object} defaults
|
||||
* @param {Boolean} [defaults.useHTTPS=false]
|
||||
* @param {Boolean} [defaults.useCompression=true] Send 'Accept-encoding: gzip' header to server & unzip gzipped responses
|
||||
* @param {Boolean} [defaults.keepAlive=false] Use the HTTP keep-alive feature if the server supports it.
|
||||
* @param {Number} [defaults.sendTimeout=30000] Send timeout in ms.
|
||||
* @param {Number} [defaults.receiveTimeout=30000] Receive timeout in ms.
|
||||
* @param {Number} [defaults.connectTimeout=60000] Connect timeout in ms.
|
||||
*/
|
||||
exports.setGlobalConnectionDefaults = function setGlobalConnectionDefaults (defaults) {
|
||||
defaults = defaults || {}
|
||||
Object.keys(connectionDefaults).forEach(function (key) {
|
||||
if (defaults.hasOwnProperty(key)) {
|
||||
connectionDefaults[key] = defaults[key]
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new HTTP server connection. In case the server is behind a proxy, see the {@link http.setGlobalProxyConfiguration} function.
|
||||
* @param {Object|String} options Either URL string in format `protocol://host:port/path` or config
|
||||
* @param {String} [options.URL] Service URL in format `protocol://host:port/path`. Will override `useHTTPS`, `server`, `host`, `port` and `path` if passed
|
||||
* @param {String} [options.server] DEPRECATED. Server to connect in format 'host:port' or 'host' in case port == 80.
|
||||
* @param {String} [options.host] Host to connect. If `server` not specified this value used
|
||||
* @param {String} [options.port] Port. Default is 80 for HTTP or 443 for HTTPS
|
||||
* @param {String} [options.path='/'] Request path. Defaults to '/'. Should include query string if any. E.G. '/index.html?page=12'
|
||||
* @param {String} [options.method='GET'] HTTP method to use for request
|
||||
* @param {Object<string, string>} [options.headers] An object containing request headers
|
||||
* @param {Boolean} [options.useHTTPS=false]
|
||||
* @param {Boolean} [options.useCompression=true] Send 'Accept-encoding: gzip' header to server & unzip gzipped responses
|
||||
* @param {Boolean} [options.keepAlive=false] Use the HTTP keep-alive feature if the server supports it.
|
||||
* @param {Number} [options.sendTimeout=30000] Send timeout in ms.
|
||||
* @param {Number} [options.receiveTimeout=30000] Receive timeout in ms.
|
||||
* @param {Number} [options.connectTimeout=60000] Connect timeout in ms.
|
||||
* @return {ClientRequest}
|
||||
*/
|
||||
exports.request = function request (options) {
|
||||
var
|
||||
parsedURL
|
||||
if (typeof options === 'string') {
|
||||
options = url.parse(options)
|
||||
options.host = options.hostname
|
||||
} else if (options.URL) {
|
||||
parsedURL = url.parse(options.URL)
|
||||
Object.assign(options, parsedURL)
|
||||
options.host = options.hostname
|
||||
} else if (options.server) {
|
||||
var host_port = options.server.split(':')
|
||||
options.host = host_port[0]
|
||||
|
||||
options.port = host_port[1]
|
||||
}
|
||||
if (!options.host) {
|
||||
throw new Error('server host is mandatory')
|
||||
}
|
||||
if (!options.hostname) { options.hostname = options.host }
|
||||
|
||||
options.path = options.path || '/'
|
||||
if (options.path.charAt(0) !== '/') options.path = '/' + options.path // need valid url according to the HTTP/1.1 RFC
|
||||
options.headers = options.headers || {}
|
||||
if (options.protocol) {
|
||||
options.useHTTPS = (options.protocol === 'https:')
|
||||
} else {
|
||||
options.useHTTPS = options.useHTTPS == null ? connectionDefaults.useHTTPS : options.useHTTPS
|
||||
}
|
||||
options.port = options.port || (options.useHTTPS ? '443' : '80')
|
||||
options.useCompression = options.useCompression == null ? connectionDefaults.useCompression : options.useCompression
|
||||
options.keepAlive = (options.keepAlive === true) ? 1 : connectionDefaults.keepAlive
|
||||
options.sendTimeout = options.sendTimeout || connectionDefaults.sendTimeout
|
||||
options.receiveTimeout = options.receiveTimeout || connectionDefaults.receiveTimeout
|
||||
options.connectTimeout = options.connectTimeout || connectionDefaults.connectTimeout
|
||||
options.method = options.method || 'GET'
|
||||
return new ClientRequest(options)
|
||||
}
|
||||
var request = exports.request
|
||||
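// Usage sketch (added for illustration; the URL is hypothetical): a POST request built
// from an options object, mirroring the module-level @example above.
//   var http = require('http')
//   var req = http.request({
//     URL: 'http://localhost:8881/api/echo',
//     method: 'POST',
//     headers: {'Content-Type': 'application/json'}
//   })
//   req.write(JSON.stringify({hello: 'world'}))
//   var resp = req.end()
//   console.log(resp.statusCode, resp.read())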
|
||||
function forEachSorted (obj, iterator, context) {
|
||||
var keys = Object.keys(obj).sort()
|
||||
keys.forEach(function (key) {
|
||||
iterator.call(context, obj[key], key)
|
||||
})
|
||||
return keys
|
||||
}
|
||||
|
||||
/**
|
||||
* Add parameters to URL
|
||||
*
|
||||
* http.buildURL('/myMethod', {a: 1, b: "1212"}); // '/myMethod?a=1&b=1212'
|
||||
*
|
||||
* @param {String} url
|
||||
* @param {Object} params
|
||||
* @returns {String}
|
||||
*/
|
||||
exports.buildURL = function buildURL (url, params) {
|
||||
if (!params) {
|
||||
return url
|
||||
}
|
||||
var parts = []
|
||||
forEachSorted(params, function (value, key) {
|
||||
if (value == null) {
|
||||
return
|
||||
}
|
||||
if (!Array.isArray(value)) {
|
||||
value = [value]
|
||||
}
|
||||
|
||||
value.forEach(function (v) {
|
||||
if (typeof v === 'object') {
|
||||
v = JSON.stringify(v)
|
||||
}
|
||||
parts.push(encodeURIComponent(key) + '=' + encodeURIComponent(v))
|
||||
})
|
||||
})
|
||||
return url + ((url.indexOf('?') == -1) ? '?' : '&') + parts.join('&')
|
||||
}
|
||||
|
||||
var buildUrl = exports.buildURL
|
||||
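// Usage sketch (added for illustration): array values produce one name=value pair per
// element, object values are JSON.stringify'ed before URL-encoding, and keys are emitted
// in sorted order.
//   var http = require('http')
//   http.buildURL('/search', {tag: ['a', 'b'], filter: {id: 1}})
//   // -> '/search?filter=%7B%22id%22%3A1%7D&tag=a&tag=b'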
|
||||
/**
|
||||
* Since most requests are GET requests without bodies, we provide this convenience method.
* The two differences between this method and http.request() are that:
|
||||
*
|
||||
* - it sets the method to GET and calls req.end() automatically
|
||||
* - it can optionally take a URLParams Object {paramName: paramValue, ..} and add its parameters to the request path
|
||||
*
|
||||
* @param {Object} options Request options as described in {@link http.request}
|
||||
* @param {Object} [URLParams] optional parameters to add to options.path
|
||||
* @returns {IncomingMessage}
|
||||
*/
|
||||
exports.get = function get (options, URLParams) {
|
||||
var req = request(options)
|
||||
if (URLParams) {
|
||||
req.setPath(buildUrl(req.options.path, URLParams))
|
||||
}
|
||||
req.setMethod('GET')
|
||||
return req.end()
|
||||
}
|
||||
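// Usage sketch (added for illustration; the URL is hypothetical): URLParams are appended
// to the request path via buildURL before the GET is sent.
//   var http = require('http')
//   var resp = http.get('http://localhost:8881/items', {page: 2, limit: 50})
//   if (resp.statusCode === 200) console.log(resp.read())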
|
||||
/**
|
||||
* This object is created internally and returned from {@link http.request}
|
||||
* It represents an in-progress request whose header has already been queued.
|
||||
* The header is still mutable using the {@link ClientRequest.setHeader setHeader(name, value)},
|
||||
* {@link ClientRequest#getHeader getHeader(name)}, {@link ClientRequest#removeHeader removeHeader(name)} API.
|
||||
* The actual header will be sent along with the {@link ClientRequest#end end()}.
|
||||
*
|
||||
* The `path` & `method` parameters are still mutable using {@link ClientRequest#setPath setPath(path)} & {@link ClientRequest#setMethod setMethod(HTTPMethod)}
|
||||
|
||||
* @class ClientRequest
|
||||
* @implements {UBWriter}
|
||||
* @protected
|
||||
* @param {Object} options
|
||||
*/
|
||||
function ClientRequest (options) {
|
||||
this.options = Object.assign({}, options)
|
||||
const _http = this.connection = new THTTPClient()
|
||||
_http.initialize(options.host, options.port, options.useHTTPS, options.useCompression,
|
||||
proxyConfig.proxy, proxyConfig.bypass, options.connectTimeout, options.sendTimeout, options.receiveTimeout
|
||||
)
|
||||
_http.keepAlive = options.keepAlive ? 1 : 0
|
||||
|
||||
// add EventEmitter to process object
|
||||
EventEmitter.call(this)
|
||||
util._extend(this, EventEmitter.prototype)
|
||||
|
||||
Object.defineProperty(this, 'path', {
|
||||
get: function () { return this.options.path },
|
||||
set: function (val) { this.options.path = val }
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Write a chunk of data to request. Actual sending performed by `end()` call.
|
||||
* @inheritDoc
|
||||
*/
|
||||
ClientRequest.prototype.write = function (data, encoding) {
|
||||
this.connection.write(data, encoding)
|
||||
}
|
||||
|
||||
/**
|
||||
* Set all headers at once, delimited by CRLF
|
||||
* @param {String} allHeaders
|
||||
*/
|
||||
ClientRequest.prototype.setHeadersAsString = function (allHeaders) {
|
||||
this.options._headersAsString = allHeaders
|
||||
}
|
||||
|
||||
function makeRequestHeaders (request) {
|
||||
if (request.options._headersAsString) return request.options._headersAsString
|
||||
|
||||
let arr = []
|
||||
let head = request.options.headers
|
||||
for (let prop in head) {
|
||||
arr.push(prop + ': ' + head[prop])
|
||||
}
|
||||
return arr.join(CRLF)
|
||||
}
|
||||
/**
|
||||
* End the request by writing an optional last chunk of data and sending the request to the server.
|
||||
* See {@link UBWriter#write} for parameters
|
||||
* @returns {IncomingMessage}
|
||||
*/
|
||||
ClientRequest.prototype.end = function (data, encoding) {
|
||||
var
|
||||
_http = this.connection,
|
||||
rUrl
|
||||
_http.writeEnd(data, encoding)
|
||||
_http.method = this.options.method
|
||||
_http.headers = makeRequestHeaders(this)
|
||||
try {
|
||||
_http.doRequest(this.options.path)
|
||||
} catch (e) {
|
||||
rUrl = (this.options.protocol || 'http:') + '//' + this.options.hostname + ':' + this.options.port + this.options.path
|
||||
throw new Error('Request to ' + rUrl + ' failed. Message: ' + e.message)
|
||||
}
|
||||
let msg = new IncomingMessage(_http)
|
||||
if (!this.emit('response', msg) ||
|
||||
!msg.emit('data', new Buffer(msg.read(msg.encoding === 'binary' ? 'bin' : msg.encoding === 'utf8' ? 'utf-8' : msg.encoding)).toString(msg.encoding)) ||
|
||||
!msg.emit('end')) {
|
||||
return msg
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a new path for the request. Usually used during several requests to the same server to avoid socket recreation.
|
||||
* @param {String} path New path. Should include query string if any. E.G. '/index.html?page=12'
|
||||
*/
|
||||
ClientRequest.prototype.setPath = function (path) {
|
||||
this.options.path = path
|
||||
}
|
||||
|
||||
/**
|
||||
* Set a new HTTP method for the request. Usually used during several requests to the same server to avoid socket recreation.
|
||||
* @param {String} method
|
||||
*/
|
||||
ClientRequest.prototype.setMethod = function (method) {
|
||||
this.options.method = method
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets a single header value for implicit headers.
|
||||
* If this header already exists in the to-be-sent headers, its value will be replaced.
|
||||
* Use an array of strings here if you need to send multiple headers with the same name
|
||||
*
|
||||
* request.setHeader('Content-Type', 'text/html');
|
||||
* request.setHeader('Set-Cookie', ['type=ninja', 'language=javascript']);
|
||||
*
|
||||
* @param {String} name
|
||||
* @param {String|Array} value
|
||||
*/
|
||||
ClientRequest.prototype.setHeader = function (name, value) {
|
||||
this.options.headers[name] = Array.isArray(value) ? value.join(';') : value
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads out a header that's already been queued but not yet sent to the server.
|
||||
* @param {String} name
|
||||
* @returns {String}
|
||||
*/
|
||||
ClientRequest.prototype.getHeader = function (name) {
|
||||
if (arguments.length < 1) {
|
||||
throw new Error('`name` is required for getHeader().')
|
||||
}
|
||||
return this.options.headers[name]
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a header that's queued for implicit sending
|
||||
* @param {String} name
|
||||
*/
|
||||
ClientRequest.prototype.removeHeader = function (name) {
|
||||
if (arguments.length < 1) {
|
||||
throw new Error('`name` is required for removeHeader().')
|
||||
}
|
||||
delete this.options.headers[name]
|
||||
}
|
||||
|
||||
/**
|
||||
* Result of HTTP request
|
||||
* @class IncomingMessage
|
||||
* @implements {UBReader}
|
||||
* @param {THTTPClient} httpClient
|
||||
* @protected
|
||||
*/
|
||||
function IncomingMessage (httpClient) {
|
||||
this._http = httpClient
|
||||
/**
|
||||
* Default encoding for read call
|
||||
* @type {String}
|
||||
*/
|
||||
this.encoding = 'utf-8'
|
||||
/** @private */
|
||||
this._parsedHeaders = null
|
||||
/**
|
||||
* HTTP status code. See also {STATUS_CODES}
|
||||
* @type {Number}
|
||||
* @readonly
|
||||
*/
|
||||
this.statusCode = this._http.responseStatus
|
||||
|
||||
// add EventEmitter to IncomingMessage object
|
||||
EventEmitter.call(this)
|
||||
util._extend(this, EventEmitter.prototype)
|
||||
|
||||
/**
|
||||
* Response headers, transformed to a JS object. Header names are lower-cased keys
|
||||
*/
|
||||
Object.defineProperty(this, 'headers', {
|
||||
get: () => this._parsedHeaders ? this._parsedHeaders : this.__doParseHeaders()
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Change default encoding for read request
|
||||
* @param {String} encoding
|
||||
*/
|
||||
IncomingMessage.prototype.setEncoding = function (encoding) {
|
||||
this.encoding = encoding
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a response body. See {@link UBReader#read} for parameters
|
||||
* @param {String} [encoding] If omitted, `this.encoding` is used
|
||||
*/
|
||||
IncomingMessage.prototype.read = function (encoding) {
|
||||
return this._http.read(encoding || this.encoding)
|
||||
}
|
||||
|
||||
/**
|
||||
* Internal function for parse response headers
|
||||
* TODO - improve node compatibility - some headers MUST be merged. See https://nodejs.org/api/http.html#http_message_headers
|
||||
* @private
|
||||
*/
|
||||
IncomingMessage.prototype.__doParseHeaders = function () {
|
||||
var
|
||||
h, hObj, hPart
|
||||
|
||||
if (!this._parsedHeaders) {
|
||||
h = this._http.responseHeaders.split(CRLF)
|
||||
hObj = {}
|
||||
h.forEach(function (header) {
|
||||
if (header) {
|
||||
hPart = header.split(': ', 2)
|
||||
if (hPart.length === 2) { hObj[hPart[0].toLowerCase()] = hPart[1] }
|
||||
}
|
||||
})
|
||||
this._parsedHeaders = hObj
|
||||
}
|
||||
|
||||
return this._parsedHeaders
|
||||
}
|
||||
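// Usage sketch (added for illustration; the URL is taken from the module-level @example):
// headers are exposed as a lower-cased key/value object and read() decodes the body using
// this.encoding unless another encoding is passed.
//   var http = require('http')
//   var resp = http.get('http://localhost:888/getAppInfo')
//   console.log(resp.statusCode, resp.headers['content-type'])
//   console.log(resp.read()) // utf-8 string by default; pass 'bin' for an ArrayBuffer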
|
||||
/**
|
||||
* HTTP status codes.
|
||||
* @type {Object.<number, string>}
|
||||
*/
|
||||
exports.STATUS_CODES = {
|
||||
100: 'Continue',
|
||||
101: 'Switching Protocols',
|
||||
102: 'Processing', // RFC 2518, obsoleted by RFC 4918
|
||||
200: 'OK',
|
||||
201: 'Created',
|
||||
202: 'Accepted',
|
||||
203: 'Non-Authoritative Information',
|
||||
204: 'No Content',
|
||||
205: 'Reset Content',
|
||||
206: 'Partial Content',
|
||||
207: 'Multi-Status', // RFC 4918
|
||||
300: 'Multiple Choices',
|
||||
301: 'Moved Permanently',
|
||||
302: 'Moved Temporarily',
|
||||
303: 'See Other',
|
||||
304: 'Not Modified',
|
||||
305: 'Use Proxy',
|
||||
307: 'Temporary Redirect',
|
||||
400: 'Bad Request',
|
||||
401: 'Unauthorized',
|
||||
402: 'Payment Required',
|
||||
403: 'Forbidden',
|
||||
404: 'Not Found',
|
||||
405: 'Method Not Allowed',
|
||||
406: 'Not Acceptable',
|
||||
407: 'Proxy Authentication Required',
|
||||
408: 'Request Time-out',
|
||||
409: 'Conflict',
|
||||
410: 'Gone',
|
||||
411: 'Length Required',
|
||||
412: 'Precondition Failed',
|
||||
413: 'Request Entity Too Large',
|
||||
414: 'Request-URI Too Large',
|
||||
415: 'Unsupported Media Type',
|
||||
416: 'Requested Range Not Satisfiable',
|
||||
417: 'Expectation Failed',
|
||||
418: 'I\'m a teapot', // RFC 2324
|
||||
422: 'Unprocessable Entity', // RFC 4918
|
||||
423: 'Locked', // RFC 4918
|
||||
424: 'Failed Dependency', // RFC 4918
|
||||
425: 'Unordered Collection', // RFC 4918
|
||||
426: 'Upgrade Required', // RFC 2817
|
||||
428: 'Precondition Required', // RFC 6585
|
||||
429: 'Too Many Requests', // RFC 6585
|
||||
431: 'Request Header Fields Too Large', // RFC 6585
|
||||
500: 'Internal Server Error',
|
||||
501: 'Not Implemented',
|
||||
502: 'Bad Gateway',
|
||||
503: 'Service Unavailable',
|
||||
504: 'Gateway Time-out',
|
||||
505: 'HTTP Version Not Supported',
|
||||
506: 'Variant Also Negotiates', // RFC 2295
|
||||
507: 'Insufficient Storage', // RFC 4918
|
||||
509: 'Bandwidth Limit Exceeded',
|
||||
510: 'Not Extended', // RFC 2774
|
||||
511: 'Network Authentication Required' // RFC 6585
|
||||
}
|
17
contrib/mORMot/SyNode/core_modules/node_modules/https.js
generated
vendored
Normal file
17
contrib/mORMot/SyNode/core_modules/node_modules/https.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
/**
|
||||
* HTTPS client.
|
||||
* @module https
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
|
||||
let http = require('http');
|
||||
|
||||
exports.request = function request(options) {
|
||||
options.useHTTPS = true;
|
||||
return http.request(options);
|
||||
}
|
||||
exports.get = function request(options) {
|
||||
options.useHTTPS = true;
|
||||
return http.get(options);
|
||||
}
|
||||
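// Usage sketch (added for illustration; the URL is taken from the http module @example):
// the wrappers delegate to http with useHTTPS forced on; an https:// URL string works as
// well, since the parsed protocol selects HTTPS.
//   let https = require('https');
//   let resp = https.get('https://synopse.info/fossil/wiki/Synopse+OpenSource');
//   console.log(resp.statusCode);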
|
551
contrib/mORMot/SyNode/core_modules/node_modules/internal/errors.js
generated
vendored
Normal file
551
contrib/mORMot/SyNode/core_modules/node_modules/internal/errors.js
generated
vendored
Normal file
@@ -0,0 +1,551 @@
|
||||
/* eslint documented-errors: "error" */
|
||||
/* eslint alphabetize-errors: "error" */
|
||||
/* eslint prefer-util-format-errors: "error" */
|
||||
|
||||
'use strict';
|
||||
|
||||
// The whole point behind this internal module is to allow Node.js to no
|
||||
// longer be forced to treat every error message change as a semver-major
|
||||
// change. The NodeError classes here all expose a `code` property whose
|
||||
// value statically and permanently identifies the error. While the error
|
||||
// message may change, the code should not.
|
||||
|
||||
const kCode = Symbol('code');
|
||||
const kInfo = Symbol('info');
|
||||
const messages = new Map();
|
||||
|
||||
const { kMaxLength } = process.binding('buffer');
|
||||
const { defineProperty } = Object;
|
||||
|
||||
// Lazily loaded
|
||||
var util = null;
|
||||
var buffer;
|
||||
|
||||
function makeNodeError(Base) {
|
||||
return class NodeError extends Base {
|
||||
constructor(key, ...args) {
|
||||
super(message(key, args));
|
||||
defineProperty(this, kCode, {
|
||||
configurable: true,
|
||||
enumerable: false,
|
||||
value: key,
|
||||
writable: true
|
||||
});
|
||||
}
|
||||
|
||||
get name() {
|
||||
return `${super.name} [${this[kCode]}]`;
|
||||
}
|
||||
|
||||
set name(value) {
|
||||
defineProperty(this, 'name', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value,
|
||||
writable: true
|
||||
});
|
||||
}
|
||||
|
||||
get code() {
|
||||
return this[kCode];
|
||||
}
|
||||
|
||||
set code(value) {
|
||||
defineProperty(this, 'code', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value,
|
||||
writable: true
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function lazyBuffer() {
|
||||
if (buffer === undefined)
|
||||
buffer = require('buffer').Buffer;
|
||||
return buffer;
|
||||
}
|
||||
|
||||
// A specialized Error that includes an additional info property with
|
||||
// additional information about the error condition. The code key will
|
||||
// be extracted from the context object or the ERR_SYSTEM_ERROR default
|
||||
// will be used.
|
||||
class SystemError extends makeNodeError(Error) {
|
||||
constructor(context) {
|
||||
context = context || {};
|
||||
let code = 'ERR_SYSTEM_ERROR';
|
||||
if (messages.has(context.code))
|
||||
code = context.code;
|
||||
super(code,
|
||||
context.code,
|
||||
context.syscall,
|
||||
context.path,
|
||||
context.dest,
|
||||
context.message);
|
||||
Object.defineProperty(this, kInfo, {
|
||||
configurable: false,
|
||||
enumerable: false,
|
||||
value: context
|
||||
});
|
||||
}
|
||||
|
||||
get info() {
|
||||
return this[kInfo];
|
||||
}
|
||||
|
||||
get errno() {
|
||||
return this[kInfo].errno;
|
||||
}
|
||||
|
||||
set errno(val) {
|
||||
this[kInfo].errno = val;
|
||||
}
|
||||
|
||||
get syscall() {
|
||||
return this[kInfo].syscall;
|
||||
}
|
||||
|
||||
set syscall(val) {
|
||||
this[kInfo].syscall = val;
|
||||
}
|
||||
|
||||
get path() {
|
||||
return this[kInfo].dest !== undefined ?
this[kInfo].dest.toString() : undefined;
|
||||
}
|
||||
|
||||
set path(val) {
|
||||
this[kInfo].path = val ?
|
||||
lazyBuffer().from(val.toString()) : undefined;
|
||||
}
|
||||
|
||||
get dest() {
|
||||
return this[kInfo].path !== undefined ?
|
||||
this[kInfo].dest.toString() : undefined;
|
||||
}
|
||||
|
||||
set dest(val) {
|
||||
this[kInfo].dest = val ?
|
||||
lazyBuffer().from(val.toString()) : undefined;
|
||||
}
|
||||
}
|
||||
|
||||
class AssertionError extends Error {
|
||||
constructor(options) {
|
||||
if (typeof options !== 'object' || options === null) {
|
||||
throw new exports.TypeError('ERR_INVALID_ARG_TYPE', 'options', 'Object');
|
||||
}
|
||||
var { actual, expected, message, operator, stackStartFunction } = options;
|
||||
if (message) {
|
||||
super(message);
|
||||
} else {
|
||||
if (actual && actual.stack && actual instanceof Error)
|
||||
actual = `${actual.name}: ${actual.message}`;
|
||||
if (expected && expected.stack && expected instanceof Error)
|
||||
expected = `${expected.name}: ${expected.message}`;
|
||||
if (util === null) util = require('util');
|
||||
super(`${util.inspect(actual).slice(0, 128)} ` +
|
||||
`${operator} ${util.inspect(expected).slice(0, 128)}`);
|
||||
}
|
||||
|
||||
this.generatedMessage = !message;
|
||||
this.name = 'AssertionError [ERR_ASSERTION]';
|
||||
this.code = 'ERR_ASSERTION';
|
||||
this.actual = actual;
|
||||
this.expected = expected;
|
||||
this.operator = operator;
|
||||
Error.captureStackTrace(this, stackStartFunction);
|
||||
}
|
||||
}
|
||||
|
||||
// This is defined here instead of using the assert module to avoid a
|
||||
// circular dependency. The effect is largely the same.
|
||||
function internalAssert(condition, message) {
|
||||
if (!condition) {
|
||||
throw new AssertionError({
|
||||
message,
|
||||
actual: false,
|
||||
expected: true,
|
||||
operator: '=='
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function message(key, args) {
|
||||
const msg = messages.get(key);
|
||||
internalAssert(msg, `An invalid error message key was used: ${key}.`);
|
||||
let fmt;
|
||||
if (typeof msg === 'function') {
|
||||
fmt = msg;
|
||||
} else {
|
||||
if (util === null) util = require('util');
|
||||
fmt = util.format;
|
||||
if (args === undefined || args.length === 0)
|
||||
return msg;
|
||||
args.unshift(msg);
|
||||
}
|
||||
return String(fmt.apply(null, args));
|
||||
}
|
||||
|
||||
// Utility function for registering the error codes. Only used here. Exported
|
||||
// *only* to allow for testing.
|
||||
function E(sym, val) {
|
||||
messages.set(sym, typeof val === 'function' ? val : String(val));
|
||||
}
|
||||
|
||||
module.exports = exports = {
|
||||
message,
|
||||
Error: makeNodeError(Error),
|
||||
TypeError: makeNodeError(TypeError),
|
||||
RangeError: makeNodeError(RangeError),
|
||||
URIError: makeNodeError(URIError),
|
||||
AssertionError,
|
||||
SystemError,
|
||||
E // This is exported only to facilitate testing.
|
||||
};
|
||||
|
||||
// To declare an error message, use the E(sym, val) function above. The sym
|
||||
// must be an upper case string. The val can be either a function or a string.
|
||||
// The return value of the function must be a string.
|
||||
// Examples:
|
||||
// E('EXAMPLE_KEY1', 'This is the error value');
|
||||
// E('EXAMPLE_KEY2', (a, b) => `${a} ${b}`);
|
||||
//
|
||||
// Once an error code has been assigned, the code itself MUST NOT change and
|
||||
// any given error code must never be reused to identify a different error.
|
||||
//
|
||||
// Any error code added here should also be added to the documentation
|
||||
//
|
||||
// Note: Please try to keep these in alphabetical order
|
||||
//
|
||||
// Note: Node.js specific errors must begin with the prefix ERR_
|
||||
|
||||
E('ERR_ARG_NOT_ITERABLE', '%s must be iterable');
|
||||
E('ERR_ASSERTION', '%s');
|
||||
E('ERR_ASYNC_CALLBACK', '%s must be a function');
|
||||
E('ERR_ASYNC_TYPE', 'Invalid name for async "type": %s');
|
||||
E('ERR_BUFFER_OUT_OF_BOUNDS', bufferOutOfBounds);
|
||||
E('ERR_BUFFER_TOO_LARGE',
|
||||
`Cannot create a Buffer larger than 0x${kMaxLength.toString(16)} bytes`);
|
||||
E('ERR_CANNOT_WATCH_SIGINT', 'Cannot watch for SIGINT signals');
|
||||
E('ERR_CHILD_CLOSED_BEFORE_REPLY', 'Child closed before reply received');
|
||||
E('ERR_CONSOLE_WRITABLE_STREAM',
|
||||
'Console expects a writable stream instance for %s');
|
||||
E('ERR_CPU_USAGE', 'Unable to obtain cpu usage %s');
|
||||
E('ERR_CRYPTO_CUSTOM_ENGINE_NOT_SUPPORTED',
|
||||
'Custom engines not supported by this OpenSSL');
|
||||
E('ERR_CRYPTO_ECDH_INVALID_FORMAT', 'Invalid ECDH format: %s');
|
||||
E('ERR_CRYPTO_ENGINE_UNKNOWN', 'Engine "%s" was not found');
|
||||
E('ERR_CRYPTO_FIPS_FORCED',
|
||||
'Cannot set FIPS mode, it was forced with --force-fips at startup.');
|
||||
E('ERR_CRYPTO_FIPS_UNAVAILABLE', 'Cannot set FIPS mode in a non-FIPS build.');
|
||||
E('ERR_CRYPTO_HASH_DIGEST_NO_UTF16', 'hash.digest() does not support UTF-16');
|
||||
E('ERR_CRYPTO_HASH_FINALIZED', 'Digest already called');
|
||||
E('ERR_CRYPTO_HASH_UPDATE_FAILED', 'Hash update failed');
|
||||
E('ERR_CRYPTO_INVALID_DIGEST', 'Invalid digest: %s');
|
||||
E('ERR_CRYPTO_INVALID_STATE', 'Invalid state for operation %s');
|
||||
E('ERR_CRYPTO_SIGN_KEY_REQUIRED', 'No key provided to sign');
|
||||
E('ERR_CRYPTO_TIMING_SAFE_EQUAL_LENGTH',
|
||||
'Input buffers must have the same length');
|
||||
E('ERR_DNS_SET_SERVERS_FAILED', 'c-ares failed to set servers: "%s" [%s]');
|
||||
E('ERR_ENCODING_INVALID_ENCODED_DATA',
|
||||
'The encoded data was not valid for encoding %s');
|
||||
E('ERR_ENCODING_NOT_SUPPORTED', 'The "%s" encoding is not supported');
|
||||
E('ERR_FALSY_VALUE_REJECTION', 'Promise was rejected with falsy value');
|
||||
E('ERR_HTTP2_CONNECT_AUTHORITY',
|
||||
':authority header is required for CONNECT requests');
|
||||
E('ERR_HTTP2_CONNECT_PATH',
|
||||
'The :path header is forbidden for CONNECT requests');
|
||||
E('ERR_HTTP2_CONNECT_SCHEME',
|
||||
'The :scheme header is forbidden for CONNECT requests');
|
||||
E('ERR_HTTP2_FRAME_ERROR',
|
||||
(type, code, id) => {
|
||||
let msg = `Error sending frame type ${type}`;
|
||||
if (id !== undefined)
|
||||
msg += ` for stream ${id}`;
|
||||
msg += ` with code ${code}`;
|
||||
return msg;
|
||||
});
|
||||
E('ERR_HTTP2_HEADERS_AFTER_RESPOND',
|
||||
'Cannot specify additional headers after response initiated');
|
||||
E('ERR_HTTP2_HEADERS_OBJECT', 'Headers must be an object');
|
||||
E('ERR_HTTP2_HEADERS_SENT', 'Response has already been initiated.');
|
||||
E('ERR_HTTP2_HEADER_REQUIRED', 'The %s header is required');
|
||||
E('ERR_HTTP2_HEADER_SINGLE_VALUE',
|
||||
'Header field "%s" must have only a single value');
|
||||
E('ERR_HTTP2_INFO_HEADERS_AFTER_RESPOND',
|
||||
'Cannot send informational headers after the HTTP message has been sent');
|
||||
E('ERR_HTTP2_INFO_STATUS_NOT_ALLOWED',
|
||||
'Informational status codes cannot be used');
|
||||
E('ERR_HTTP2_INVALID_CONNECTION_HEADERS',
|
||||
'HTTP/1 Connection specific headers are forbidden: "%s"');
|
||||
E('ERR_HTTP2_INVALID_HEADER_VALUE', 'Invalid value "%s" for header "%s"');
|
||||
E('ERR_HTTP2_INVALID_INFO_STATUS',
|
||||
'Invalid informational status code: %s');
|
||||
E('ERR_HTTP2_INVALID_PACKED_SETTINGS_LENGTH',
|
||||
'Packed settings length must be a multiple of six');
|
||||
E('ERR_HTTP2_INVALID_PSEUDOHEADER',
|
||||
'"%s" is an invalid pseudoheader or is used incorrectly');
|
||||
E('ERR_HTTP2_INVALID_SESSION', 'The session has been destroyed');
|
||||
E('ERR_HTTP2_INVALID_SETTING_VALUE',
|
||||
'Invalid value for setting "%s": %s');
|
||||
E('ERR_HTTP2_INVALID_STREAM', 'The stream has been destroyed');
|
||||
E('ERR_HTTP2_MAX_PENDING_SETTINGS_ACK',
|
||||
'Maximum number of pending settings acknowledgements (%s)');
|
||||
E('ERR_HTTP2_NO_SOCKET_MANIPULATION',
|
||||
'HTTP/2 sockets should not be directly manipulated (e.g. read and written)');
|
||||
E('ERR_HTTP2_OUT_OF_STREAMS',
|
||||
'No stream ID is available because maximum stream ID has been reached');
|
||||
E('ERR_HTTP2_PAYLOAD_FORBIDDEN',
|
||||
'Responses with %s status must not have a payload');
|
||||
E('ERR_HTTP2_PSEUDOHEADER_NOT_ALLOWED', 'Cannot set HTTP/2 pseudo-headers');
|
||||
E('ERR_HTTP2_PUSH_DISABLED', 'HTTP/2 client has disabled push streams');
|
||||
E('ERR_HTTP2_SEND_FILE', 'Only regular files can be sent');
|
||||
E('ERR_HTTP2_SOCKET_BOUND',
|
||||
'The socket is already bound to an Http2Session');
|
||||
E('ERR_HTTP2_STATUS_101',
|
||||
'HTTP status code 101 (Switching Protocols) is forbidden in HTTP/2');
|
||||
E('ERR_HTTP2_STATUS_INVALID', 'Invalid status code: %s');
|
||||
E('ERR_HTTP2_STREAM_CLOSED', 'The stream is already closed');
|
||||
E('ERR_HTTP2_STREAM_ERROR', 'Stream closed with error code %s');
|
||||
E('ERR_HTTP2_STREAM_SELF_DEPENDENCY', 'A stream cannot depend on itself');
|
||||
E('ERR_HTTP2_UNSUPPORTED_PROTOCOL', 'protocol "%s" is unsupported.');
|
||||
E('ERR_HTTP_HEADERS_SENT',
|
||||
'Cannot %s headers after they are sent to the client');
|
||||
E('ERR_HTTP_INVALID_CHAR', 'Invalid character in statusMessage.');
|
||||
E('ERR_HTTP_INVALID_HEADER_VALUE', 'Invalid value "%s" for header "%s"');
|
||||
E('ERR_HTTP_INVALID_STATUS_CODE', 'Invalid status code: %s');
|
||||
E('ERR_HTTP_TRAILER_INVALID',
|
||||
'Trailers are invalid with this transfer encoding');
|
||||
E('ERR_INDEX_OUT_OF_RANGE', 'Index out of range');
|
||||
E('ERR_INSPECTOR_ALREADY_CONNECTED', 'The inspector is already connected');
|
||||
E('ERR_INSPECTOR_CLOSED', 'Session was closed');
|
||||
E('ERR_INSPECTOR_NOT_AVAILABLE', 'Inspector is not available');
|
||||
E('ERR_INSPECTOR_NOT_CONNECTED', 'Session is not connected');
|
||||
E('ERR_INVALID_ARG_TYPE', invalidArgType);
|
||||
E('ERR_INVALID_ARG_VALUE', (name, value) =>
|
||||
`The value "${String(value)}" is invalid for argument "${name}"`);
|
||||
E('ERR_INVALID_ARRAY_LENGTH',
|
||||
(name, len, actual) => {
|
||||
internalAssert(typeof actual === 'number', 'actual must be a number');
|
||||
return `The array "${name}" (length ${actual}) must be of length ${len}.`;
|
||||
});
|
||||
E('ERR_INVALID_ASYNC_ID', 'Invalid %s value: %s');
|
||||
E('ERR_INVALID_BUFFER_SIZE', 'Buffer size must be a multiple of %s');
|
||||
E('ERR_INVALID_CALLBACK', 'Callback must be a function');
|
||||
E('ERR_INVALID_CHAR', invalidChar);
|
||||
E('ERR_INVALID_CURSOR_POS',
|
||||
'Cannot set cursor row without setting its column');
|
||||
E('ERR_INVALID_DOMAIN_NAME', 'Unable to determine the domain name');
|
||||
E('ERR_INVALID_FD', '"fd" must be a positive integer: %s');
|
||||
E('ERR_INVALID_FD_TYPE', 'Unsupported fd type: %s');
|
||||
E('ERR_INVALID_FILE_URL_HOST',
|
||||
'File URL host must be "localhost" or empty on %s');
|
||||
E('ERR_INVALID_FILE_URL_PATH', 'File URL path %s');
|
||||
E('ERR_INVALID_HANDLE_TYPE', 'This handle type cannot be sent');
|
||||
E('ERR_INVALID_HTTP_TOKEN', '%s must be a valid HTTP token ["%s"]');
|
||||
E('ERR_INVALID_IP_ADDRESS', 'Invalid IP address: %s');
|
||||
E('ERR_INVALID_OPT_VALUE', (name, value) =>
|
||||
`The value "${String(value)}" is invalid for option "${name}"`);
|
||||
E('ERR_INVALID_OPT_VALUE_ENCODING',
|
||||
'The value "%s" is invalid for option "encoding"');
|
||||
E('ERR_INVALID_PERFORMANCE_MARK', 'The "%s" performance mark has not been set');
|
||||
E('ERR_INVALID_PROTOCOL', 'Protocol "%s" not supported. Expected "%s"');
|
||||
E('ERR_INVALID_REPL_EVAL_CONFIG',
|
||||
'Cannot specify both "breakEvalOnSigint" and "eval" for REPL');
|
||||
E('ERR_INVALID_SYNC_FORK_INPUT',
|
||||
'Asynchronous forks do not support Buffer, Uint8Array or string input: %s');
|
||||
E('ERR_INVALID_THIS', 'Value of "this" must be of type %s');
|
||||
E('ERR_INVALID_TUPLE', '%s must be an iterable %s tuple');
|
||||
E('ERR_INVALID_URI', 'URI malformed');
|
||||
E('ERR_INVALID_URL', 'Invalid URL: %s');
|
||||
E('ERR_INVALID_URL_SCHEME',
|
||||
(expected) => `The URL must be ${oneOf(expected, 'scheme')}`);
|
||||
E('ERR_IPC_CHANNEL_CLOSED', 'Channel closed');
|
||||
E('ERR_IPC_DISCONNECTED', 'IPC channel is already disconnected');
|
||||
E('ERR_IPC_ONE_PIPE', 'Child process can have only one IPC pipe');
|
||||
E('ERR_IPC_SYNC_FORK', 'IPC cannot be used with synchronous forks');
|
||||
E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented');
|
||||
E('ERR_MISSING_ARGS', missingArgs);
|
||||
E('ERR_MISSING_DYNAMIC_INSTANTIATE_HOOK',
|
||||
'The ES Module loader may not return a format of \'dynamic\' when no ' +
|
||||
'dynamicInstantiate function was provided');
|
||||
E('ERR_MISSING_MODULE', 'Cannot find module %s');
|
||||
E('ERR_MODULE_RESOLUTION_LEGACY', '%s not found by import in %s.' +
|
||||
' Legacy behavior in require() would have found it at %s');
|
||||
E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times');
|
||||
E('ERR_NAPI_CONS_FUNCTION', 'Constructor must be a function');
|
||||
E('ERR_NAPI_CONS_PROTOTYPE_OBJECT', 'Constructor.prototype must be an object');
|
||||
E('ERR_NO_CRYPTO', 'Node.js is not compiled with OpenSSL crypto support');
|
||||
E('ERR_NO_ICU', '%s is not supported on Node.js compiled without ICU');
|
||||
E('ERR_NO_LONGER_SUPPORTED', '%s is no longer supported');
|
||||
E('ERR_OUTOFMEMORY', 'Out of memory');
|
||||
E('ERR_OUT_OF_RANGE', 'The "%s" argument is out of range');
|
||||
E('ERR_PARSE_HISTORY_DATA', 'Could not parse history data in %s');
|
||||
E('ERR_REQUIRE_ESM', 'Must use import to load ES Module: %s');
|
||||
E('ERR_SERVER_ALREADY_LISTEN',
|
||||
'Listen method has been called more than once without closing.');
|
||||
E('ERR_SOCKET_ALREADY_BOUND', 'Socket is already bound');
|
||||
E('ERR_SOCKET_BAD_BUFFER_SIZE', 'Buffer size must be a positive integer');
|
||||
E('ERR_SOCKET_BAD_PORT', 'Port should be > 0 and < 65536. Received %s.');
|
||||
E('ERR_SOCKET_BAD_TYPE',
|
||||
'Bad socket type specified. Valid types are: udp4, udp6');
|
||||
E('ERR_SOCKET_BUFFER_SIZE', 'Could not get or set buffer size: %s');
|
||||
E('ERR_SOCKET_CANNOT_SEND', 'Unable to send data');
|
||||
E('ERR_SOCKET_CLOSED', 'Socket is closed');
|
||||
E('ERR_SOCKET_DGRAM_NOT_RUNNING', 'Not running');
|
||||
E('ERR_STDERR_CLOSE', 'process.stderr cannot be closed');
|
||||
E('ERR_STDOUT_CLOSE', 'process.stdout cannot be closed');
|
||||
E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable');
|
||||
E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream');
|
||||
E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF');
|
||||
E('ERR_STREAM_READ_NOT_IMPLEMENTED', '_read() is not implemented');
|
||||
E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event');
|
||||
E('ERR_STREAM_WRAP', 'Stream has StringDecoder set or is in objectMode');
|
||||
E('ERR_STREAM_WRITE_AFTER_END', 'write after end');
|
||||
E('ERR_SYSTEM_ERROR', sysError('A system error occurred'));
|
||||
E('ERR_TLS_CERT_ALTNAME_INVALID',
|
||||
'Hostname/IP does not match certificate\'s altnames: %s');
|
||||
E('ERR_TLS_DH_PARAM_SIZE', 'DH parameter size %s is less than 2048');
|
||||
E('ERR_TLS_HANDSHAKE_TIMEOUT', 'TLS handshake timeout');
|
||||
E('ERR_TLS_RENEGOTIATION_FAILED', 'Failed to renegotiate');
|
||||
E('ERR_TLS_REQUIRED_SERVER_NAME',
|
||||
'"servername" is required parameter for Server.addContext');
|
||||
E('ERR_TLS_SESSION_ATTACK', 'TLS session renegotiation attack detected');
|
||||
E('ERR_TRANSFORM_ALREADY_TRANSFORMING',
|
||||
'Calling transform done when still transforming');
|
||||
E('ERR_TRANSFORM_WITH_LENGTH_0',
|
||||
'Calling transform done when writableState.length != 0');
|
||||
E('ERR_UNESCAPED_CHARACTERS', '%s contains unescaped characters');
|
||||
E('ERR_UNHANDLED_ERROR',
|
||||
(err) => {
|
||||
const msg = 'Unhandled error.';
|
||||
if (err === undefined) return msg;
|
||||
return `${msg} (${err})`;
|
||||
});
|
||||
E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s');
|
||||
E('ERR_UNKNOWN_FILE_EXTENSION', 'Unknown file extension: %s');
|
||||
E('ERR_UNKNOWN_MODULE_FORMAT', 'Unknown module format: %s');
|
||||
E('ERR_UNKNOWN_SIGNAL', 'Unknown signal: %s');
|
||||
E('ERR_UNKNOWN_STDIN_TYPE', 'Unknown stdin file type');
|
||||
E('ERR_UNKNOWN_STREAM_TYPE', 'Unknown stream file type');
|
||||
E('ERR_V8BREAKITERATOR', 'Full ICU data not installed. ' +
|
||||
'See https://github.com/nodejs/node/wiki/Intl');
|
||||
E('ERR_VALID_PERFORMANCE_ENTRY_TYPE',
|
||||
'At least one valid performance entry type is required');
|
||||
E('ERR_VALUE_OUT_OF_RANGE', (start, end, value) => {
|
||||
return `The value of "${start}" must be ${end}. Received "${value}"`;
|
||||
});
|
||||
E('ERR_ZLIB_BINDING_CLOSED', 'zlib binding closed');
|
||||
E('ERR_ZLIB_INITIALIZATION_FAILED', 'Initialization failed');
|
||||
|
||||
function sysError(defaultMessage) {
|
||||
return function(code,
|
||||
syscall,
|
||||
path,
|
||||
dest,
|
||||
message = defaultMessage) {
|
||||
if (code !== undefined)
|
||||
message += `: ${code}`;
|
||||
if (syscall !== undefined) {
|
||||
if (code === undefined)
|
||||
message += ':';
|
||||
message += ` [${syscall}]`;
|
||||
}
|
||||
if (path !== undefined) {
|
||||
message += `: ${path}`;
|
||||
if (dest !== undefined)
|
||||
message += ` => ${dest}`;
|
||||
}
|
||||
return message;
|
||||
};
|
||||
}
|
||||
|
||||
function invalidArgType(name, expected, actual) {
|
||||
internalAssert(name, 'name is required');
|
||||
|
||||
// determiner: 'must be' or 'must not be'
|
||||
let determiner;
|
||||
if (typeof expected === 'string' && expected.startsWith('not ')) {
|
||||
determiner = 'must not be';
|
||||
expected = expected.replace(/^not /, '');
|
||||
} else {
|
||||
determiner = 'must be';
|
||||
}
|
||||
|
||||
let msg;
|
||||
if (Array.isArray(name)) {
|
||||
var names = name.map((val) => `"${val}"`).join(', ');
|
||||
msg = `The ${names} arguments ${determiner} ${oneOf(expected, 'type')}`;
|
||||
} else if (name.endsWith(' argument')) {
|
||||
// for the case like 'first argument'
|
||||
msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`;
|
||||
} else {
|
||||
const type = name.includes('.') ? 'property' : 'argument';
|
||||
msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`;
|
||||
}
|
||||
|
||||
// if actual value received, output it
|
||||
if (arguments.length >= 3) {
|
||||
msg += `. Received type ${actual !== null ? typeof actual : 'null'}`;
|
||||
}
|
||||
return msg;
|
||||
}
|
||||
|
||||
function missingArgs(...args) {
|
||||
internalAssert(args.length > 0, 'At least one arg needs to be specified');
|
||||
let msg = 'The ';
|
||||
const len = args.length;
|
||||
args = args.map((a) => `"${a}"`);
|
||||
switch (len) {
|
||||
case 1:
|
||||
msg += `${args[0]} argument`;
|
||||
break;
|
||||
case 2:
|
||||
msg += `${args[0]} and ${args[1]} arguments`;
|
||||
break;
|
||||
default:
|
||||
msg += args.slice(0, len - 1).join(', ');
|
||||
msg += `, and ${args[len - 1]} arguments`;
|
||||
break;
|
||||
}
|
||||
return `${msg} must be specified`;
|
||||
}
|
||||
|
||||
function oneOf(expected, thing) {
|
||||
internalAssert(expected, 'expected is required');
|
||||
internalAssert(typeof thing === 'string', 'thing is required');
|
||||
if (Array.isArray(expected)) {
|
||||
const len = expected.length;
|
||||
internalAssert(len > 0,
|
||||
'At least one expected value needs to be specified');
|
||||
expected = expected.map((i) => String(i));
|
||||
if (len > 2) {
|
||||
return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` +
|
||||
expected[len - 1];
|
||||
} else if (len === 2) {
|
||||
return `one of ${thing} ${expected[0]} or ${expected[1]}`;
|
||||
} else {
|
||||
return `of ${thing} ${expected[0]}`;
|
||||
}
|
||||
} else {
|
||||
return `of ${thing} ${String(expected)}`;
|
||||
}
|
||||
}
|
||||
|
||||
function bufferOutOfBounds(name, isWriting) {
|
||||
if (isWriting) {
|
||||
return 'Attempt to write outside buffer bounds';
|
||||
} else {
|
||||
return `"${name}" is outside of buffer bounds`;
|
||||
}
|
||||
}
|
||||
|
||||
function invalidChar(name, field) {
|
||||
let msg = `Invalid character in ${name}`;
|
||||
if (field) {
|
||||
msg += ` ["${field}"]`;
|
||||
}
|
||||
return msg;
|
||||
}
|
102
contrib/mORMot/SyNode/core_modules/node_modules/internal/fs.js
generated
vendored
Normal file
102
contrib/mORMot/SyNode/core_modules/node_modules/internal/fs.js
generated
vendored
Normal file
@@ -0,0 +1,102 @@
|
||||
'use strict';
|
||||
|
||||
const { Buffer } = require('buffer');
|
||||
const { Writable } = require('stream');
|
||||
const fs = require('fs');
|
||||
const util = require('util');
|
||||
|
||||
const {
|
||||
O_APPEND,
|
||||
O_CREAT,
|
||||
O_EXCL,
|
||||
O_RDONLY,
|
||||
O_RDWR,
|
||||
O_SYNC,
|
||||
O_TRUNC,
|
||||
O_WRONLY
|
||||
} = process.binding('constants').fs;
|
||||
|
||||
function assertEncoding(encoding) {
|
||||
if (encoding && !Buffer.isEncoding(encoding)) {
|
||||
throw new Error(`Unknown encoding: ${encoding}`);
|
||||
}
|
||||
}
|
||||
|
||||
function stringToFlags(flag) {
|
||||
if (typeof flag === 'number') {
|
||||
return flag;
|
||||
}
|
||||
|
||||
switch (flag) {
|
||||
case 'r' : return O_RDONLY;
|
||||
case 'rs' : // Fall through.
|
||||
case 'sr' : return O_RDONLY | O_SYNC;
|
||||
case 'r+' : return O_RDWR;
|
||||
case 'rs+' : // Fall through.
|
||||
case 'sr+' : return O_RDWR | O_SYNC;
|
||||
|
||||
case 'w' : return O_TRUNC | O_CREAT | O_WRONLY;
|
||||
case 'wx' : // Fall through.
|
||||
case 'xw' : return O_TRUNC | O_CREAT | O_WRONLY | O_EXCL;
|
||||
|
||||
case 'w+' : return O_TRUNC | O_CREAT | O_RDWR;
|
||||
case 'wx+': // Fall through.
|
||||
case 'xw+': return O_TRUNC | O_CREAT | O_RDWR | O_EXCL;
|
||||
|
||||
case 'a' : return O_APPEND | O_CREAT | O_WRONLY;
|
||||
case 'ax' : // Fall through.
|
||||
case 'xa' : return O_APPEND | O_CREAT | O_WRONLY | O_EXCL;
|
||||
|
||||
case 'a+' : return O_APPEND | O_CREAT | O_RDWR;
|
||||
case 'ax+': // Fall through.
|
||||
case 'xa+': return O_APPEND | O_CREAT | O_RDWR | O_EXCL;
|
||||
}
|
||||
|
||||
throw new Error('Unknown file open flag: ' + flag);
|
||||
}
|
||||
|
||||
// Temporary hack for process.stdout and process.stderr when piped to files.
|
||||
function SyncWriteStream(fd, options) {
|
||||
Writable.call(this);
|
||||
|
||||
options = options || {};
|
||||
|
||||
this.fd = fd;
|
||||
this.readable = false;
|
||||
this.autoClose = options.autoClose === undefined ? true : options.autoClose;
|
||||
|
||||
this.on('end', () => this._destroy());
|
||||
}
|
||||
|
||||
util.inherits(SyncWriteStream, Writable);
|
||||
|
||||
SyncWriteStream.prototype._write = function(chunk, encoding, cb) {
|
||||
fs.writeSync(this.fd, chunk, 0, chunk.length);
|
||||
cb();
|
||||
return true;
|
||||
};
|
||||
|
||||
SyncWriteStream.prototype._destroy = function() {
|
||||
if (this.fd === null) // already destroy()ed
|
||||
return;
|
||||
|
||||
if (this.autoClose)
|
||||
fs.closeSync(this.fd);
|
||||
|
||||
this.fd = null;
|
||||
return true;
|
||||
};
|
||||
|
||||
SyncWriteStream.prototype.destroySoon =
|
||||
SyncWriteStream.prototype.destroy = function() {
|
||||
this._destroy();
|
||||
this.emit('close');
|
||||
return true;
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
assertEncoding,
|
||||
stringToFlags,
|
||||
SyncWriteStream,
|
||||
realpathCacheKey: Symbol('realpathCacheKey')
|
||||
};
|
149
contrib/mORMot/SyNode/core_modules/node_modules/internal/module.js
generated
vendored
Normal file
149
contrib/mORMot/SyNode/core_modules/node_modules/internal/module.js
generated
vendored
Normal file
@@ -0,0 +1,149 @@
|
||||
'use strict';
|
||||
|
||||
// Invoke with makeRequireFunction(module) where |module| is the Module object
|
||||
// to use as the context for the require() function.
|
||||
function makeRequireFunction(mod) {
|
||||
const Module = mod.constructor;
|
||||
|
||||
function require(path) {
|
||||
try {
|
||||
exports.requireDepth += 1;
|
||||
return mod.require(path);
|
||||
} finally {
|
||||
exports.requireDepth -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
function resolve(request, options) {
|
||||
return Module._resolveFilename(request, mod, false, options);
|
||||
}
|
||||
|
||||
require.resolve = resolve;
|
||||
|
||||
function paths(request) {
|
||||
return Module._resolveLookupPaths(request, mod, true);
|
||||
}
|
||||
|
||||
resolve.paths = paths;
|
||||
|
||||
require.main = process.mainModule;
|
||||
|
||||
// Enable support to add extra extension types.
|
||||
require.extensions = Module._extensions;
|
||||
|
||||
require.cache = Module._cache;
|
||||
|
||||
return require;
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
|
||||
* because the buffer-to-string conversion in `fs.readFileSync()`
|
||||
* translates it to FEFF, the UTF-16 BOM.
|
||||
*/
|
||||
function stripBOM(content) {
|
||||
if (content.charCodeAt(0) === 0xFEFF) {
|
||||
content = content.slice(1);
|
||||
}
|
||||
return content;
|
||||
}
|
||||
|
||||
/**
|
||||
* Find end of shebang line and slice it off
|
||||
*/
|
||||
function stripShebang(content) {
|
||||
// Remove shebang
|
||||
var contLen = content.length;
|
||||
if (contLen >= 2) {
|
||||
if (content.charCodeAt(0) === 35/*#*/ &&
|
||||
content.charCodeAt(1) === 33/*!*/) {
|
||||
if (contLen === 2) {
|
||||
// Exact match
|
||||
content = '';
|
||||
} else {
|
||||
// Find end of shebang line and slice it off
|
||||
var i = 2;
|
||||
for (; i < contLen; ++i) {
|
||||
var code = content.charCodeAt(i);
|
||||
if (code === 10/*\n*/ || code === 13/*\r*/)
|
||||
break;
|
||||
}
|
||||
if (i === contLen)
|
||||
content = '';
|
||||
else {
|
||||
// Note that this actually includes the newline character(s) in the
|
||||
// new output. This duplicates the behavior of the regular expression
|
||||
// that was previously used to replace the shebang line
|
||||
content = content.slice(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return content;
|
||||
}
|
||||
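// Illustrative checks for the two helpers above (added for illustration):
//   stripBOM('\uFEFFmodule.exports = 1');        // -> 'module.exports = 1'
//   stripShebang('#!/usr/bin/env node\nfoo();'); // -> '\nfoo();' (newline is kept)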
|
||||
const builtinLibs = [
|
||||
'assert', 'async_hooks', 'buffer', 'child_process', 'cluster', 'crypto',
|
||||
'dgram', 'dns', 'domain', 'events', 'fs', 'http', 'https', 'net',
|
||||
'os', 'path', 'perf_hooks', 'punycode', 'querystring', 'readline', 'repl',
|
||||
'stream', 'string_decoder', 'tls', 'tty', 'url', 'util', 'v8', 'vm', 'zlib'
|
||||
];
|
||||
|
||||
/* SyNode
|
||||
const { exposeHTTP2 } = process.binding('config');
|
||||
if (exposeHTTP2)
|
||||
builtinLibs.push('http2');
|
||||
|
||||
if (typeof process.binding('inspector').connect === 'function') {
|
||||
builtinLibs.push('inspector');
|
||||
builtinLibs.sort();
|
||||
}
|
||||
*/
|
||||
|
||||
function addBuiltinLibsToObject(object) {
|
||||
// Make built-in modules available directly (loaded lazily).
|
||||
builtinLibs.forEach((name) => {
|
||||
// Goals of this mechanism are:
|
||||
// - Lazy loading of built-in modules
|
||||
// - Having all built-in modules available as non-enumerable properties
|
||||
// - Allowing the user to re-assign these variables as if there were no
|
||||
// pre-existing globals with the same name.
|
||||
|
||||
const setReal = (val) => {
|
||||
// Deleting the property before re-assigning it disables the
|
||||
// getter/setter mechanism.
|
||||
delete object[name];
|
||||
object[name] = val;
|
||||
};
|
||||
|
||||
Object.defineProperty(object, name, {
|
||||
get: () => {
|
||||
const lib = require(name);
|
||||
|
||||
// Disable the current getter/setter and set up a new
|
||||
// non-enumerable property.
|
||||
delete object[name];
|
||||
Object.defineProperty(object, name, {
|
||||
get: () => lib,
|
||||
set: setReal,
|
||||
configurable: true,
|
||||
enumerable: false
|
||||
});
|
||||
|
||||
return lib;
|
||||
},
|
||||
set: setReal,
|
||||
configurable: true,
|
||||
enumerable: false
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = exports = {
|
||||
addBuiltinLibsToObject,
|
||||
builtinLibs,
|
||||
makeRequireFunction,
|
||||
requireDepth: 0,
|
||||
stripBOM,
|
||||
stripShebang
|
||||
};
|
18
contrib/mORMot/SyNode/core_modules/node_modules/internal/net.js
generated
vendored
Normal file
18
contrib/mORMot/SyNode/core_modules/node_modules/internal/net.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = { isLegalPort, assertPort };
|
||||
|
||||
// Check that the port number is not NaN when coerced to a number,
|
||||
// is an integer and that it falls within the legal range of port numbers.
|
||||
function isLegalPort(port) {
|
||||
if ((typeof port !== 'number' && typeof port !== 'string') ||
|
||||
(typeof port === 'string' && port.trim().length === 0))
|
||||
return false;
|
||||
return +port === (+port >>> 0) && port <= 0xFFFF;
|
||||
}
|
||||
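// Illustrative values (added for illustration):
//   isLegalPort(8080)   // true
//   isLegalPort('8080') // true
//   isLegalPort(65536)  // false - out of range
//   isLegalPort('  ')   // false - blank string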
|
||||
|
||||
function assertPort(port) {
|
||||
if (typeof port !== 'undefined' && !isLegalPort(port))
|
||||
throw new RangeError('"port" argument must be >= 0 and < 65536');
|
||||
}
|
175
contrib/mORMot/SyNode/core_modules/node_modules/internal/process/stdio.js
generated
vendored
Normal file
175
contrib/mORMot/SyNode/core_modules/node_modules/internal/process/stdio.js
generated
vendored
Normal file
@@ -0,0 +1,175 @@
|
||||
'use strict';
|
||||
|
||||
exports.setup = setupStdio;
|
||||
|
||||
function setupStdio() {
|
||||
var stdin, stdout, stderr;
|
||||
|
||||
function getStdout() {
|
||||
if (stdout) return stdout;
|
||||
stdout = createWritableStdioStream(1);
|
||||
stdout.destroy = stdout.destroySoon = function(er) {
|
||||
er = er || new Error('process.stdout cannot be closed.');
|
||||
stdout.emit('error', er);
|
||||
};
|
||||
if (stdout.isTTY) {
|
||||
process.on('SIGWINCH', () => stdout._refreshSize());
|
||||
}
|
||||
return stdout;
|
||||
}
|
||||
|
||||
function getStderr() {
|
||||
if (stderr) return stderr;
|
||||
stderr = createWritableStdioStream(2);
|
||||
stderr.destroy = stderr.destroySoon = function(er) {
|
||||
er = er || new Error('process.stderr cannot be closed.');
|
||||
stderr.emit('error', er);
|
||||
};
|
||||
if (stderr.isTTY) {
|
||||
process.on('SIGWINCH', () => stderr._refreshSize());
|
||||
}
|
||||
return stderr;
|
||||
}
|
||||
|
||||
function getStdin() {
|
||||
if (stdin) return stdin;
|
||||
|
||||
const tty_wrap = process.binding('tty_wrap');
|
||||
const fd = 0;
|
||||
|
||||
switch (tty_wrap.guessHandleType(fd)) {
|
||||
case 'TTY':
|
||||
const tty = require('tty');
|
||||
stdin = new tty.ReadStream(fd, {
|
||||
highWaterMark: 0,
|
||||
readable: true,
|
||||
writable: false
|
||||
});
|
||||
break;
|
||||
|
||||
case 'FILE':
|
||||
const fs = require('fs');
|
||||
stdin = new fs.ReadStream(null, { fd: fd, autoClose: false });
|
||||
break;
|
||||
|
||||
case 'PIPE':
|
||||
case 'TCP':
|
||||
const net = require('net');
|
||||
|
||||
// It could be that process has been started with an IPC channel
|
||||
// sitting on fd=0, in such case the pipe for this fd is already
|
||||
// present and creating a new one will lead to the assertion failure
|
||||
// in libuv.
|
||||
if (process._channel && process._channel.fd === fd) {
|
||||
stdin = new net.Socket({
|
||||
handle: process._channel,
|
||||
readable: true,
|
||||
writable: false
|
||||
});
|
||||
} else {
|
||||
stdin = new net.Socket({
|
||||
fd: fd,
|
||||
readable: true,
|
||||
writable: false
|
||||
});
|
||||
}
|
||||
// Make sure the stdin can't be `.end()`-ed
|
||||
stdin._writableState.ended = true;
|
||||
break;
|
||||
|
||||
default:
|
||||
// Probably an error in uv_guess_handle()
|
||||
throw new Error('Implement me. Unknown stdin file type!');
|
||||
}
|
||||
|
||||
// For supporting legacy API we put the FD here.
|
||||
stdin.fd = fd;
|
||||
|
||||
// stdin starts out life in a paused state, but node doesn't
|
||||
// know yet. Explicitly to readStop() it to put it in the
|
||||
// not-reading state.
|
||||
if (stdin._handle && stdin._handle.readStop) {
|
||||
stdin._handle.reading = false;
|
||||
stdin._readableState.reading = false;
|
||||
stdin._handle.readStop();
|
||||
}
|
||||
|
||||
// if the user calls stdin.pause(), then we need to stop reading
|
||||
// immediately, so that the process can close down.
|
||||
stdin.on('pause', () => {
|
||||
if (!stdin._handle)
|
||||
return;
|
||||
stdin._readableState.reading = false;
|
||||
stdin._handle.reading = false;
|
||||
stdin._handle.readStop();
|
||||
});
|
||||
|
||||
return stdin;
|
||||
}
|
||||
|
||||
Object.defineProperty(process, 'stdout', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: getStdout
|
||||
});
|
||||
|
||||
Object.defineProperty(process, 'stderr', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: getStderr
|
||||
});
|
||||
|
||||
Object.defineProperty(process, 'stdin', {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
get: getStdin
|
||||
});
|
||||
|
||||
process.openStdin = function() {
|
||||
process.stdin.resume();
|
||||
return process.stdin;
|
||||
};
|
||||
}
|
||||
|
||||
function createWritableStdioStream(fd) {
|
||||
var stream;
|
||||
const tty_wrap = process.binding('tty_wrap');
|
||||
|
||||
// Note stream._type is used for test-module-load-list.js
|
||||
|
||||
switch (tty_wrap.guessHandleType(fd)) {
|
||||
case 'TTY':
|
||||
const tty = require('tty');
|
||||
stream = new tty.WriteStream(fd);
|
||||
stream._type = 'tty';
|
||||
break;
|
||||
|
||||
case 'FILE':
|
||||
const fs = require('internal/fs');
|
||||
stream = new fs.SyncWriteStream(fd, { autoClose: false });
|
||||
stream._type = 'fs';
|
||||
break;
|
||||
|
||||
case 'PIPE':
|
||||
case 'TCP':
|
||||
const net = require('net');
|
||||
stream = new net.Socket({
|
||||
fd: fd,
|
||||
readable: false,
|
||||
writable: true
|
||||
});
|
||||
stream._type = 'pipe';
|
||||
break;
|
||||
|
||||
default:
|
||||
// Probably an error in uv_guess_handle()
|
||||
throw new Error('Implement me. Unknown stream file type!');
|
||||
}
|
||||
|
||||
// For supporting legacy API we put the FD here.
|
||||
stream.fd = fd;
|
||||
|
||||
stream._isStdio = true;
|
||||
|
||||
return stream;
|
||||
}
|
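process.stdout, process.stderr and process.stdin above are defined as configurable getters so the underlying handle is only created on first access. A minimal sketch of the same lazy-getter pattern, using a hypothetical logger object:

function defineLazyStream(target, name, create) {
  let cached;
  Object.defineProperty(target, name, {
    configurable: true,
    enumerable: true,
    // create() runs on first read only; later reads reuse the cached stream.
    get() { return cached || (cached = create()); }
  });
}

// Hypothetical usage: no tty/file/pipe handle is opened until `logger.out`
// is actually touched.
const logger = {};
defineLazyStream(logger, 'out', () => process.stdout);
logger.out.write('lazy!\n');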
29
contrib/mORMot/SyNode/core_modules/node_modules/internal/querystring.js
generated
vendored
Normal file
29
contrib/mORMot/SyNode/core_modules/node_modules/internal/querystring.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
'use strict';
|
||||
|
||||
const hexTable = new Array(256);
|
||||
for (var i = 0; i < 256; ++i)
|
||||
hexTable[i] = '%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase();
|
||||
|
||||
const isHexTable = [
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0 - 15
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 16 - 31
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 32 - 47
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, // 48 - 63
|
||||
0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 64 - 79
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 80 - 95
|
||||
0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 96 - 111
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 112 - 127
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 128 ...
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 // ... 256
|
||||
];
|
||||
|
||||
module.exports = {
|
||||
hexTable,
|
||||
isHexTable
|
||||
};
|
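A quick sketch of how the two lookup tables above are used, assuming the module resolves as internal/querystring inside this vendored tree:

const { hexTable, isHexTable } = require('internal/querystring');

hexTable[0x20];                  // '%20' - percent-encoding of a space byte
hexTable[0xFF];                  // '%FF'
isHexTable['a'.charCodeAt(0)];   // 1 - 'a' is a hexadecimal digit
isHexTable['g'.charCodeAt(0)];   // 0 - 'g' is not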
72
contrib/mORMot/SyNode/core_modules/node_modules/internal/streams/BufferList.js
generated
vendored
Normal file
72
contrib/mORMot/SyNode/core_modules/node_modules/internal/streams/BufferList.js
generated
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
'use strict';
|
||||
|
||||
const Buffer = require('buffer').Buffer;
|
||||
|
||||
module.exports = BufferList;
|
||||
|
||||
function BufferList() {
|
||||
this.head = null;
|
||||
this.tail = null;
|
||||
this.length = 0;
|
||||
}
|
||||
|
||||
BufferList.prototype.push = function(v) {
|
||||
const entry = { data: v, next: null };
|
||||
if (this.length > 0)
|
||||
this.tail.next = entry;
|
||||
else
|
||||
this.head = entry;
|
||||
this.tail = entry;
|
||||
++this.length;
|
||||
};
|
||||
|
||||
BufferList.prototype.unshift = function(v) {
|
||||
const entry = { data: v, next: this.head };
|
||||
if (this.length === 0)
|
||||
this.tail = entry;
|
||||
this.head = entry;
|
||||
++this.length;
|
||||
};
|
||||
|
||||
BufferList.prototype.shift = function() {
|
||||
if (this.length === 0)
|
||||
return;
|
||||
const ret = this.head.data;
|
||||
if (this.length === 1)
|
||||
this.head = this.tail = null;
|
||||
else
|
||||
this.head = this.head.next;
|
||||
--this.length;
|
||||
return ret;
|
||||
};
|
||||
|
||||
BufferList.prototype.clear = function() {
|
||||
this.head = this.tail = null;
|
||||
this.length = 0;
|
||||
};
|
||||
|
||||
BufferList.prototype.join = function(s) {
|
||||
if (this.length === 0)
|
||||
return '';
|
||||
var p = this.head;
|
||||
var ret = '' + p.data;
|
||||
while (p = p.next)
|
||||
ret += s + p.data;
|
||||
return ret;
|
||||
};
|
||||
|
||||
BufferList.prototype.concat = function(n) {
|
||||
if (this.length === 0)
|
||||
return Buffer.alloc(0);
|
||||
if (this.length === 1)
|
||||
return this.head.data;
|
||||
const ret = Buffer.allocUnsafe(n >>> 0);
|
||||
var p = this.head;
|
||||
var i = 0;
|
||||
while (p) {
|
||||
p.data.copy(ret, i);
|
||||
i += p.data.length;
|
||||
p = p.next;
|
||||
}
|
||||
return ret;
|
||||
};
|
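A minimal usage sketch for the linked-list buffer accumulator above, assuming it can be required as internal/streams/BufferList:

const { Buffer } = require('buffer');
const BufferList = require('internal/streams/BufferList');

const list = new BufferList();
list.push(Buffer.from('foo'));
list.push(Buffer.from('bar'));
list.unshift(Buffer.from('>'));   // '>' becomes the new head

list.length;                      // 3 entries
list.join('|');                   // '>|foo|bar'
list.concat(7).toString();        // '>foobar' - 7 is the total byte count
list.shift().toString();          // '>' - removes and returns the head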
39
contrib/mORMot/SyNode/core_modules/node_modules/internal/streams/lazy_transform.js
generated
vendored
Normal file
39
contrib/mORMot/SyNode/core_modules/node_modules/internal/streams/lazy_transform.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
// LazyTransform is a special type of Transform stream that is lazily loaded.
|
||||
// This is used for performance with bi-API-ship: when two APIs are available
|
||||
// for the stream, one conventional and one non-conventional.
|
||||
'use strict';
|
||||
|
||||
const stream = require('stream');
|
||||
const util = require('util');
|
||||
|
||||
module.exports = LazyTransform;
|
||||
|
||||
function LazyTransform(options) {
|
||||
this._options = options;
|
||||
}
|
||||
util.inherits(LazyTransform, stream.Transform);
|
||||
|
||||
[
|
||||
'_readableState',
|
||||
'_writableState',
|
||||
'_transformState'
|
||||
].forEach(function(prop, i, props) {
|
||||
Object.defineProperty(LazyTransform.prototype, prop, {
|
||||
get: function() {
|
||||
stream.Transform.call(this, this._options);
|
||||
this._writableState.decodeStrings = false;
|
||||
this._writableState.defaultEncoding = 'latin1';
|
||||
return this[prop];
|
||||
},
|
||||
set: function(val) {
|
||||
Object.defineProperty(this, prop, {
|
||||
value: val,
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true
|
||||
});
|
||||
},
|
||||
configurable: true,
|
||||
enumerable: true
|
||||
});
|
||||
});
|
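The accessors above make constructing a LazyTransform subclass cheap: stream.Transform only runs when one of the state properties is first read. A behavioral sketch, assuming the module resolves as internal/streams/lazy_transform:

const LazyTransform = require('internal/streams/lazy_transform');
const util = require('util');

function Cheap(options) {
  LazyTransform.call(this, options);   // stores options; no stream setup yet
}
util.inherits(Cheap, LazyTransform);

const c = new Cheap({ highWaterMark: 1024 });
// The first access below triggers stream.Transform.call(this, this._options)
// through the prototype getter, after which the own property shadows it.
c._writableState.defaultEncoding;      // 'latin1', as set in the getter above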
168
contrib/mORMot/SyNode/core_modules/node_modules/internal/util.js
generated
vendored
Normal file
168
contrib/mORMot/SyNode/core_modules/node_modules/internal/util.js
generated
vendored
Normal file
@@ -0,0 +1,168 @@
|
||||
'use strict';
|
||||
debugger;
|
||||
const binding = process.binding('util');
|
||||
/* Orel
|
||||
const prefix = `(${process.release.name}:${process.pid}) `;
|
||||
|
||||
const kArrowMessagePrivateSymbolIndex = binding['arrow_message_private_symbol'];
|
||||
const kDecoratedPrivateSymbolIndex = binding['decorated_private_symbol'];
|
||||
|
||||
exports.getHiddenValue = binding.getHiddenValue;
|
||||
exports.setHiddenValue = binding.setHiddenValue;
|
||||
*/
|
||||
// The `buffer` module uses this. Defining it here instead of in the public
|
||||
// `util` module makes it accessible without having to `require('util')` there.
|
||||
exports.customInspectSymbol = Symbol('util.inspect.custom');
|
||||
|
||||
// All the internal deprecations have to use this function only, as this will
|
||||
// prepend the prefix to the actual message.
|
||||
exports.deprecate = function(fn, msg) {
|
||||
return exports._deprecate(fn, msg);
|
||||
};
|
||||
|
||||
/* Orel
|
||||
exports.error = function(msg) {
|
||||
const fmt = `${prefix}${msg}`;
|
||||
if (arguments.length > 1) {
|
||||
const args = new Array(arguments.length);
|
||||
args[0] = fmt;
|
||||
for (let i = 1; i < arguments.length; ++i)
|
||||
args[i] = arguments[i];
|
||||
console.error.apply(console, args);
|
||||
} else {
|
||||
console.error(fmt);
|
||||
}
|
||||
};
|
||||
|
||||
exports.trace = function(msg) {
|
||||
console.trace(`${prefix}${msg}`);
|
||||
};*/
|
||||
|
||||
// Mark that a method should not be used.
|
||||
// Returns a modified function which warns once by default.
|
||||
// If --no-deprecation is set, then it is a no-op.
|
||||
exports._deprecate = function(fn, msg) {
|
||||
// Allow for deprecating things in the process of starting up.
|
||||
if (global.process === undefined) {
|
||||
return function() {
|
||||
return exports._deprecate(fn, msg).apply(this, arguments);
|
||||
};
|
||||
}
|
||||
|
||||
if (process.noDeprecation === true) {
|
||||
return fn;
|
||||
}
|
||||
|
||||
var warned = false;
|
||||
function deprecated() {
|
||||
if (!warned) {
|
||||
warned = true;
|
||||
process.emitWarning(msg, 'DeprecationWarning', deprecated);
|
||||
}
|
||||
if (new.target) {
|
||||
return Reflect.construct(fn, arguments, new.target);
|
||||
}
|
||||
return fn.apply(this, arguments);
|
||||
}
|
||||
|
||||
// The wrapper will keep the same prototype as fn to maintain prototype chain
|
||||
Object.setPrototypeOf(deprecated, fn);
|
||||
if (fn.prototype) {
|
||||
// Setting this (rather than using Object.setPrototypeOf, as above) ensures
|
||||
// that calling the unwrapped constructor gives an instanceof the wrapped
|
||||
// constructor.
|
||||
deprecated.prototype = fn.prototype;
|
||||
}
|
||||
|
||||
return deprecated;
|
||||
};
|
||||
/* Orel
|
||||
exports.decorateErrorStack = function decorateErrorStack(err) {
|
||||
if (!(exports.isError(err) && err.stack) ||
|
||||
exports.getHiddenValue(err, kDecoratedPrivateSymbolIndex) === true)
|
||||
return;
|
||||
|
||||
const arrow = exports.getHiddenValue(err, kArrowMessagePrivateSymbolIndex);
|
||||
|
||||
if (arrow) {
|
||||
err.stack = arrow + err.stack;
|
||||
exports.setHiddenValue(err, kDecoratedPrivateSymbolIndex, true);
|
||||
}
|
||||
};*/
|
||||
|
||||
exports.isError = function isError(e) {
|
||||
return exports.objectToString(e) === '[object Error]' || e instanceof Error;
|
||||
};
|
||||
|
||||
exports.objectToString = function objectToString(o) {
|
||||
return Object.prototype.toString.call(o);
|
||||
};
|
||||
|
||||
/* Orel
|
||||
const noCrypto = !process.versions.openssl;
|
||||
*/
|
||||
const noCrypto = false;
|
||||
exports.assertCrypto = function(exports) {
|
||||
if (noCrypto)
|
||||
throw new Error('Node.js is not compiled with openssl crypto support');
|
||||
};
|
||||
|
||||
exports.kIsEncodingSymbol = Symbol('node.isEncoding');
|
||||
exports.normalizeEncoding = function normalizeEncoding(enc) {
|
||||
if (!enc) return 'utf8';
|
||||
var low;
|
||||
for (;;) {
|
||||
switch (enc) {
|
||||
case 'utf8':
|
||||
case 'utf-8':
|
||||
return 'utf8';
|
||||
case 'ucs2':
|
||||
case 'utf16le':
|
||||
case 'ucs-2':
|
||||
case 'utf-16le':
|
||||
return 'utf16le';
|
||||
case 'binary':
|
||||
return 'latin1';
|
||||
case 'base64':
|
||||
case 'ascii':
|
||||
case 'latin1':
|
||||
case 'hex':
|
||||
return enc;
|
||||
default:
|
||||
if (low) return; // undefined
|
||||
enc = ('' + enc).toLowerCase();
|
||||
low = true;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Filters duplicate strings. Used to support functions in crypto and tls
|
||||
// modules. Implemented specifically to maintain existing behaviors in each.
|
||||
exports.filterDuplicateStrings = function filterDuplicateStrings(items, low) {
|
||||
if (!Array.isArray(items))
|
||||
return [];
|
||||
const len = items.length;
|
||||
if (len <= 1)
|
||||
return items;
|
||||
const map = new Map();
|
||||
for (var i = 0; i < len; i++) {
|
||||
const item = items[i];
|
||||
const key = item.toLowerCase();
|
||||
if (low) {
|
||||
map.set(key, key);
|
||||
} else {
|
||||
if (!map.has(key) || map.get(key) <= item)
|
||||
map.set(key, item);
|
||||
}
|
||||
}
|
||||
return Array.from(map.values()).sort();
|
||||
};
|
||||
|
||||
exports.cachedResult = function cachedResult(fn) {
|
||||
var result;
|
||||
return () => {
|
||||
if (result === undefined)
|
||||
result = fn();
|
||||
return result;
|
||||
};
|
||||
};
|
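A quick sketch of two helpers above, assuming the module resolves as internal/util:

const { normalizeEncoding, cachedResult } = require('internal/util');

normalizeEncoding('UTF-8');    // 'utf8'  - lower-cased on the second pass
normalizeEncoding('UCS-2');    // 'utf16le'
normalizeEncoding('binary');   // 'latin1'
normalizeEncoding('bogus');    // undefined - unknown encodings are rejected

// cachedResult memoizes a zero-argument factory: it runs at most once.
const getConfig = cachedResult(() => ({ createdAt: Date.now() }));
getConfig() === getConfig();   // true - the same object is returned each call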
751
contrib/mORMot/SyNode/core_modules/node_modules/module.js
generated
vendored
Normal file
751
contrib/mORMot/SyNode/core_modules/node_modules/module.js
generated
vendored
Normal file
@@ -0,0 +1,751 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||
// copy of this software and associated documentation files (the
|
||||
// "Software"), to deal in the Software without restriction, including
|
||||
// without limitation the rights to use, copy, modify, merge, publish,
|
||||
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||
// persons to whom the Software is furnished to do so, subject to the
|
||||
// following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included
|
||||
// in all copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
'use strict';
|
||||
|
||||
const NativeModule = require('native_module');
|
||||
const util = require('util');
|
||||
const internalModule = require('internal/module');
|
||||
// SyNode const { getURLFromFilePath } = require('internal/url');
|
||||
const vm = require('vm');
|
||||
const assert = require('assert').ok;
|
||||
const fs = require('fs');
|
||||
const internalFS = require('internal/fs');
|
||||
const path = require('path');
|
||||
const {
|
||||
internalModuleReadFile,
|
||||
internalModuleStat
|
||||
} = process.binding('fs');
|
||||
/* SyNode
|
||||
const preserveSymlinks = !!process.binding('config').preserveSymlinks;
|
||||
const experimentalModules = !!process.binding('config').experimentalModules;
|
||||
|
||||
const errors = require('internal/errors');
|
||||
|
||||
const Loader = require('internal/loader/Loader');
|
||||
const ModuleJob = require('internal/loader/ModuleJob');
|
||||
const { createDynamicModule } = require('internal/loader/ModuleWrap');
|
||||
*/
|
||||
const preserveSymlinks = false;
|
||||
const experimentalModules = false;
|
||||
const errors = require('internal/errors');
|
||||
//end SyNode
|
||||
let ESMLoader;
|
||||
|
||||
function stat(filename) {
|
||||
filename = path._makeLong(filename);
|
||||
const cache = stat.cache;
|
||||
if (cache !== null) {
|
||||
const result = cache.get(filename);
|
||||
if (result !== undefined) return result;
|
||||
}
|
||||
const result = internalModuleStat(filename);
|
||||
if (cache !== null) cache.set(filename, result);
|
||||
return result;
|
||||
}
|
||||
stat.cache = null;
|
||||
|
||||
function updateChildren(parent, child, scan) {
|
||||
var children = parent && parent.children;
|
||||
if (children && !(scan && children.includes(child)))
|
||||
children.push(child);
|
||||
}
|
||||
|
||||
function Module(id, parent) {
|
||||
this.id = id;
|
||||
this.exports = {};
|
||||
this.parent = parent;
|
||||
updateChildren(parent, this, false);
|
||||
this.filename = null;
|
||||
this.loaded = false;
|
||||
this.children = [];
|
||||
}
|
||||
module.exports = Module;
|
||||
|
||||
Module._cache = Object.create(null);
|
||||
Module._pathCache = Object.create(null);
|
||||
Module._extensions = Object.create(null);
|
||||
var modulePaths = [];
|
||||
Module.globalPaths = [];
|
||||
|
||||
Module.wrapper = NativeModule.wrapper;
|
||||
Module.wrap = NativeModule.wrap;
|
||||
Module._debug = util.debuglog('module');
|
||||
|
||||
// We use this alias for the preprocessor that filters it out
|
||||
const debug = Module._debug;
|
||||
|
||||
|
||||
// given a module name, and a list of paths to test, returns the first
|
||||
// matching file in the following precedence.
|
||||
//
|
||||
// require("a.<ext>")
|
||||
// -> a.<ext>
|
||||
//
|
||||
// require("a")
|
||||
// -> a
|
||||
// -> a.<ext>
|
||||
// -> a/index.<ext>
|
||||
|
||||
// check if the directory is a package.json dir
|
||||
const packageMainCache = Object.create(null);
|
||||
|
||||
function readPackage(requestPath) {
|
||||
const entry = packageMainCache[requestPath];
|
||||
if (entry)
|
||||
return entry;
|
||||
|
||||
const jsonPath = path.resolve(requestPath, 'package.json');
|
||||
const json = internalModuleReadFile(path._makeLong(jsonPath));
|
||||
|
||||
if (json === undefined) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
var pkg = packageMainCache[requestPath] = JSON.parse(json).main;
|
||||
} catch (e) {
|
||||
e.path = jsonPath;
|
||||
e.message = 'Error parsing ' + jsonPath + ': ' + e.message;
|
||||
throw e;
|
||||
}
|
||||
return pkg;
|
||||
}
|
||||
|
||||
function tryPackage(requestPath, exts, isMain) {
|
||||
var pkg = readPackage(requestPath);
|
||||
|
||||
if (!pkg) return false;
|
||||
|
||||
var filename = path.resolve(requestPath, pkg);
|
||||
return tryFile(filename, isMain) ||
|
||||
tryExtensions(filename, exts, isMain) ||
|
||||
tryExtensions(path.resolve(filename, 'index'), exts, isMain);
|
||||
}
|
||||
|
||||
// In order to minimize unnecessary lstat() calls,
|
||||
// this cache is a list of known-real paths.
|
||||
// Set to an empty Map to reset.
|
||||
const realpathCache = new Map();
|
||||
|
||||
// check if the file exists and is not a directory
|
||||
// if using --preserve-symlinks and isMain is false,
|
||||
// keep symlinks intact, otherwise resolve to the
|
||||
// absolute realpath.
|
||||
function tryFile(requestPath, isMain) {
|
||||
const rc = stat(requestPath);
|
||||
if (preserveSymlinks && !isMain) {
|
||||
return rc === 0 && path.resolve(requestPath);
|
||||
}
|
||||
return rc === 0 && toRealPath(requestPath);
|
||||
}
|
||||
|
||||
function toRealPath(requestPath) {
|
||||
return fs.realpathSync(requestPath, {
|
||||
[internalFS.realpathCacheKey]: realpathCache
|
||||
});
|
||||
}
|
||||
|
||||
// given a path, check if the file exists with any of the set extensions
|
||||
function tryExtensions(p, exts, isMain) {
|
||||
for (var i = 0; i < exts.length; i++) {
|
||||
const filename = tryFile(p + exts[i], isMain);
|
||||
|
||||
if (filename) {
|
||||
return filename;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
var warned = false;
|
||||
Module._findPath = function(request, paths, isMain) {
|
||||
if (path.isAbsolute(request)) {
|
||||
paths = [''];
|
||||
} else if (!paths || paths.length === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
var cacheKey = request + '\x00' +
|
||||
(paths.length === 1 ? paths[0] : paths.join('\x00'));
|
||||
var entry = Module._pathCache[cacheKey];
|
||||
if (entry)
|
||||
return entry;
|
||||
|
||||
var exts;
|
||||
var trailingSlash = request.length > 0 &&
|
||||
request.charCodeAt(request.length - 1) === 47/*/*/;
|
||||
|
||||
// For each path
|
||||
for (var i = 0; i < paths.length; i++) {
|
||||
// Don't search further if path doesn't exist
|
||||
const curPath = paths[i];
|
||||
if (curPath && stat(curPath) < 1) continue;
|
||||
var basePath = path.resolve(curPath, request);
|
||||
var filename;
|
||||
|
||||
var rc = stat(basePath);
|
||||
if (!trailingSlash) {
|
||||
if (rc === 0) { // File.
|
||||
if (preserveSymlinks && !isMain) {
|
||||
filename = path.resolve(basePath);
|
||||
} else {
|
||||
filename = toRealPath(basePath);
|
||||
}
|
||||
} else if (rc === 1) { // Directory.
|
||||
if (exts === undefined)
|
||||
exts = Object.keys(Module._extensions);
|
||||
filename = tryPackage(basePath, exts, isMain);
|
||||
}
|
||||
|
||||
if (!filename) {
|
||||
// try it with each of the extensions
|
||||
if (exts === undefined)
|
||||
exts = Object.keys(Module._extensions);
|
||||
filename = tryExtensions(basePath, exts, isMain);
|
||||
}
|
||||
}
|
||||
|
||||
if (!filename && rc === 1) { // Directory.
|
||||
if (exts === undefined)
|
||||
exts = Object.keys(Module._extensions);
|
||||
filename = tryPackage(basePath, exts, isMain) ||
|
||||
// try it with each of the extensions at "index"
|
||||
tryExtensions(path.resolve(basePath, 'index'), exts, isMain);
|
||||
}
|
||||
|
||||
if (filename) {
|
||||
// Warn once if '.' resolved outside the module dir
|
||||
if (request === '.' && i > 0) {
|
||||
if (!warned) {
|
||||
warned = true;
|
||||
process.emitWarning(
|
||||
'warning: require(\'.\') resolved outside the package ' +
|
||||
'directory. This functionality is deprecated and will be removed ' +
|
||||
'soon.',
|
||||
'DeprecationWarning', 'DEP0019');
|
||||
}
|
||||
}
|
||||
|
||||
Module._pathCache[cacheKey] = filename;
|
||||
return filename;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
// 'node_modules' character codes reversed
|
||||
var nmChars = [ 115, 101, 108, 117, 100, 111, 109, 95, 101, 100, 111, 110 ];
|
||||
var nmLen = nmChars.length;
|
||||
if (process.platform === 'win32') {
|
||||
// 'from' is the __dirname of the module.
|
||||
Module._nodeModulePaths = function(from) {
|
||||
// guarantee that 'from' is absolute.
|
||||
from = path.resolve(from);
|
||||
|
||||
// note: this approach *only* works when the path is guaranteed
|
||||
// to be absolute. Doing a fully-edge-case-correct path.split
|
||||
// that works on both Windows and Posix is non-trivial.
|
||||
|
||||
// return root node_modules when path is 'D:\\'.
|
||||
// path.resolve will make sure from.length >=3 in Windows.
|
||||
if (from.charCodeAt(from.length - 1) === 92/*\*/ &&
|
||||
from.charCodeAt(from.length - 2) === 58/*:*/)
|
||||
return [from + 'node_modules'];
|
||||
|
||||
const paths = [];
|
||||
var p = 0;
|
||||
var last = from.length;
|
||||
for (var i = from.length - 1; i >= 0; --i) {
|
||||
const code = from.charCodeAt(i);
|
||||
// The path segment separator check ('\' and '/') was used to get
|
||||
// node_modules path for every path segment.
|
||||
// Use colon as an extra condition since we can get node_modules
|
||||
// path for drive root like 'C:\node_modules' and don't need to
|
||||
// parse drive name.
|
||||
if (code === 92/*\*/ || code === 47/*/*/ || code === 58/*:*/) {
|
||||
if (p !== nmLen)
|
||||
paths.push(from.slice(0, last) + '\\node_modules');
|
||||
last = i;
|
||||
p = 0;
|
||||
} else if (p !== -1) {
|
||||
if (nmChars[p] === code) {
|
||||
++p;
|
||||
} else {
|
||||
p = -1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return paths;
|
||||
};
|
||||
} else { // posix
|
||||
// 'from' is the __dirname of the module.
|
||||
Module._nodeModulePaths = function(from) {
|
||||
// guarantee that 'from' is absolute.
|
||||
from = path.resolve(from);
|
||||
// Return early not only to avoid unnecessary work, but to *avoid* returning
|
||||
// an array of two items for a root: [ '//node_modules', '/node_modules' ]
|
||||
if (from === '/')
|
||||
return ['/node_modules'];
|
||||
|
||||
// note: this approach *only* works when the path is guaranteed
|
||||
// to be absolute. Doing a fully-edge-case-correct path.split
|
||||
// that works on both Windows and Posix is non-trivial.
|
||||
const paths = [];
|
||||
var p = 0;
|
||||
var last = from.length;
|
||||
for (var i = from.length - 1; i >= 0; --i) {
|
||||
const code = from.charCodeAt(i);
|
||||
if (code === 47/*/*/) {
|
||||
if (p !== nmLen)
|
||||
paths.push(from.slice(0, last) + '/node_modules');
|
||||
last = i;
|
||||
p = 0;
|
||||
} else if (p !== -1) {
|
||||
if (nmChars[p] === code) {
|
||||
++p;
|
||||
} else {
|
||||
p = -1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Append /node_modules to handle root paths.
|
||||
paths.push('/node_modules');
|
||||
|
||||
return paths;
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// 'index.' character codes
|
||||
var indexChars = [ 105, 110, 100, 101, 120, 46 ];
|
||||
var indexLen = indexChars.length;
|
||||
Module._resolveLookupPaths = function(request, parent, newReturn) {
|
||||
if (NativeModule.nonInternalExists(request)) {
|
||||
debug('looking for %j in []', request);
|
||||
return (newReturn ? null : [request, []]);
|
||||
}
|
||||
|
||||
// Check for relative path
|
||||
if (request.length < 2 ||
|
||||
request.charCodeAt(0) !== 46/*.*/ ||
|
||||
(request.charCodeAt(1) !== 46/*.*/ &&
|
||||
request.charCodeAt(1) !== 47/*/*/)) {
|
||||
var paths = modulePaths;
|
||||
if (parent) {
|
||||
if (!parent.paths)
|
||||
paths = parent.paths = [];
|
||||
else
|
||||
paths = parent.paths.concat(paths);
|
||||
}
|
||||
|
||||
// Maintain backwards compat with certain broken uses of require('.')
|
||||
// by putting the module's directory in front of the lookup paths.
|
||||
if (request === '.') {
|
||||
if (parent && parent.filename) {
|
||||
paths.unshift(path.dirname(parent.filename));
|
||||
} else {
|
||||
paths.unshift(path.resolve(request));
|
||||
}
|
||||
}
|
||||
|
||||
debug('looking for %j in %j', request, paths);
|
||||
return (newReturn ? (paths.length > 0 ? paths : null) : [request, paths]);
|
||||
}
|
||||
|
||||
// with --eval, parent.id is not set and parent.filename is null
|
||||
if (!parent || !parent.id || !parent.filename) {
|
||||
// make require('./path/to/foo') work - normally the path is taken
|
||||
// from realpath(__filename) but with eval there is no filename
|
||||
var mainPaths = ['.'].concat(Module._nodeModulePaths('.'), modulePaths);
|
||||
|
||||
debug('looking for %j in %j', request, mainPaths);
|
||||
return (newReturn ? mainPaths : [request, mainPaths]);
|
||||
}
|
||||
|
||||
// Is the parent an index module?
|
||||
// We can assume the parent has a valid extension,
|
||||
// as it already has been accepted as a module.
|
||||
const base = path.basename(parent.filename);
|
||||
var parentIdPath;
|
||||
if (base.length > indexLen) {
|
||||
var i = 0;
|
||||
for (; i < indexLen; ++i) {
|
||||
if (indexChars[i] !== base.charCodeAt(i))
|
||||
break;
|
||||
}
|
||||
if (i === indexLen) {
|
||||
// We matched 'index.', let's validate the rest
|
||||
for (; i < base.length; ++i) {
|
||||
const code = base.charCodeAt(i);
|
||||
if (code !== 95/*_*/ &&
|
||||
(code < 48/*0*/ || code > 57/*9*/) &&
|
||||
(code < 65/*A*/ || code > 90/*Z*/) &&
|
||||
(code < 97/*a*/ || code > 122/*z*/))
|
||||
break;
|
||||
}
|
||||
if (i === base.length) {
|
||||
// Is an index module
|
||||
parentIdPath = parent.id;
|
||||
} else {
|
||||
// Not an index module
|
||||
parentIdPath = path.dirname(parent.id);
|
||||
}
|
||||
} else {
|
||||
// Not an index module
|
||||
parentIdPath = path.dirname(parent.id);
|
||||
}
|
||||
} else {
|
||||
// Not an index module
|
||||
parentIdPath = path.dirname(parent.id);
|
||||
}
|
||||
var id = path.resolve(parentIdPath, request);
|
||||
|
||||
// make sure require('./path') and require('path') get distinct ids, even
|
||||
// when called from the toplevel js file
|
||||
if (parentIdPath === '.' && id.indexOf('/') === -1) {
|
||||
id = './' + id;
|
||||
}
|
||||
|
||||
debug('RELATIVE: requested: %s set ID to: %s from %s', request, id,
|
||||
parent.id);
|
||||
|
||||
var parentDir = [path.dirname(parent.filename)];
|
||||
debug('looking for %j in %j', id, parentDir);
|
||||
return (newReturn ? parentDir : [id, parentDir]);
|
||||
};
|
||||
|
||||
|
||||
// Check the cache for the requested file.
|
||||
// 1. If a module already exists in the cache: return its exports object.
|
||||
// 2. If the module is native: call `NativeModule.require()` with the
|
||||
// filename and return the result.
|
||||
// 3. Otherwise, create a new module for the file and save it to the cache.
|
||||
// Then have it load the file contents before returning its exports
|
||||
// object.
|
||||
Module._load = function(request, parent, isMain) {
|
||||
if (parent) {
|
||||
debug('Module._load REQUEST %s parent: %s', request, parent.id);
|
||||
}
|
||||
|
||||
if (isMain && experimentalModules) {
|
||||
(async () => {
|
||||
// loader setup
|
||||
if (!ESMLoader) {
|
||||
ESMLoader = new Loader();
|
||||
const userLoader = false // SyNode process.binding('config').userLoader;
|
||||
if (userLoader) {
|
||||
const hooks = await ESMLoader.import(userLoader);
|
||||
ESMLoader = new Loader();
|
||||
ESMLoader.hook(hooks);
|
||||
}
|
||||
}
|
||||
await ESMLoader.import(getURLFromFilePath(request).pathname);
|
||||
})()
|
||||
.catch((e) => {
|
||||
console.error(e);
|
||||
process.exit(1);
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
var filename = Module._resolveFilename(request, parent, isMain);
|
||||
|
||||
var cachedModule = Module._cache[filename];
|
||||
if (cachedModule) {
|
||||
updateChildren(parent, cachedModule, true);
|
||||
return cachedModule.exports;
|
||||
}
|
||||
|
||||
if (NativeModule.nonInternalExists(filename)) {
|
||||
debug('load native module %s', request);
|
||||
return NativeModule.require(filename);
|
||||
}
|
||||
|
||||
// Don't call updateChildren(), Module constructor already does.
|
||||
var module = new Module(filename, parent);
|
||||
|
||||
if (isMain) {
|
||||
process.mainModule = module;
|
||||
module.id = '.';
|
||||
}
|
||||
|
||||
Module._cache[filename] = module;
|
||||
|
||||
tryModuleLoad(module, filename);
|
||||
|
||||
return module.exports;
|
||||
};
|
||||
|
||||
function tryModuleLoad(module, filename) {
|
||||
var threw = true;
|
||||
try {
|
||||
module.load(filename);
|
||||
threw = false;
|
||||
} finally {
|
||||
if (threw) {
|
||||
delete Module._cache[filename];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Module._resolveFilename = function(request, parent, isMain, options) {
|
||||
if (NativeModule.nonInternalExists(request)) {
|
||||
return request;
|
||||
}
|
||||
|
||||
var paths;
|
||||
|
||||
if (typeof options === 'object' && options !== null &&
|
||||
Array.isArray(options.paths)) {
|
||||
paths = [];
|
||||
|
||||
for (var i = 0; i < options.paths.length; i++) {
|
||||
const path = options.paths[i];
|
||||
const lookupPaths = Module._resolveLookupPaths(path, parent, true);
|
||||
|
||||
if (!paths.includes(path))
|
||||
paths.push(path);
|
||||
|
||||
for (var j = 0; j < lookupPaths.length; j++) {
|
||||
if (!paths.includes(lookupPaths[j]))
|
||||
paths.push(lookupPaths[j]);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
paths = Module._resolveLookupPaths(request, parent, true);
|
||||
}
|
||||
|
||||
// look up the filename first, since that's the cache key.
|
||||
var filename = Module._findPath(request, paths, isMain);
|
||||
if (!filename) {
|
||||
var err = new Error(`Cannot find module '${request}'`);
|
||||
err.code = 'MODULE_NOT_FOUND';
|
||||
throw err;
|
||||
}
|
||||
return filename;
|
||||
};
|
||||
|
||||
|
||||
// Given a file name, pass it to the proper extension handler.
|
||||
Module.prototype.load = function(filename) {
|
||||
debug('load %j for module %j', filename, this.id);
|
||||
|
||||
assert(!this.loaded);
|
||||
this.filename = filename;
|
||||
this.paths = Module._nodeModulePaths(path.dirname(filename));
|
||||
|
||||
var extension = path.extname(filename) || '.js';
|
||||
if (!Module._extensions[extension]) extension = '.js';
|
||||
Module._extensions[extension](this, filename);
|
||||
this.loaded = true;
|
||||
|
||||
if (ESMLoader) {
|
||||
const url = getURLFromFilePath(filename);
|
||||
const urlString = `${url}`;
|
||||
if (ESMLoader.moduleMap.has(urlString) !== true) {
|
||||
const ctx = createDynamicModule(['default'], url);
|
||||
ctx.reflect.exports.default.set(this.exports);
|
||||
ESMLoader.moduleMap.set(urlString,
|
||||
new ModuleJob(ESMLoader, url, async () => ctx));
|
||||
} else {
|
||||
const job = ESMLoader.moduleMap.get(urlString);
|
||||
if (job.reflect)
|
||||
job.reflect.exports.default.set(this.exports);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// Loads a module at the given file path. Returns that module's
|
||||
// `exports` property.
|
||||
Module.prototype.require = function(path) {
|
||||
assert(path, 'missing path');
|
||||
assert(typeof path === 'string', 'path must be a string');
|
||||
return Module._load(path, this, /* isMain */ false);
|
||||
};
|
||||
|
||||
|
||||
// Resolved path to process.argv[1] will be lazily placed here
|
||||
// (needed for setting breakpoint when called with --inspect-brk)
|
||||
var resolvedArgv;
|
||||
|
||||
|
||||
// Run the file contents in the correct scope or sandbox. Expose
|
||||
// the correct helper variables (require, module, exports) to
|
||||
// the file.
|
||||
// Returns exception, if any.
|
||||
Module.prototype._compile = function(content, filename) {
|
||||
|
||||
content = internalModule.stripShebang(content);
|
||||
|
||||
// create wrapper function
|
||||
var wrapper = Module.wrap(content);
|
||||
|
||||
var compiledWrapper = vm.runInThisContext(wrapper, {
|
||||
filename: filename,
|
||||
lineOffset: 0,
|
||||
displayErrors: true
|
||||
});
|
||||
|
||||
var inspectorWrapper = null;
|
||||
if (process._breakFirstLine && process._eval == null) {
|
||||
if (!resolvedArgv) {
|
||||
// we enter the repl if we're not given a filename argument.
|
||||
if (process.argv[1]) {
|
||||
resolvedArgv = Module._resolveFilename(process.argv[1], null, false);
|
||||
} else {
|
||||
resolvedArgv = 'repl';
|
||||
}
|
||||
}
|
||||
|
||||
// Set breakpoint on module start
|
||||
if (filename === resolvedArgv) {
|
||||
delete process._breakFirstLine;
|
||||
inspectorWrapper = process.binding('inspector').callAndPauseOnStart;
|
||||
if (!inspectorWrapper) {
|
||||
const Debug = vm.runInDebugContext('Debug');
|
||||
Debug.setBreakPoint(compiledWrapper, 0, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
var dirname = path.dirname(filename);
|
||||
var require = internalModule.makeRequireFunction(this);
|
||||
var depth = internalModule.requireDepth;
|
||||
if (depth === 0) stat.cache = new Map();
|
||||
var result;
|
||||
if (inspectorWrapper) {
|
||||
result = inspectorWrapper(compiledWrapper, this.exports, this.exports,
|
||||
require, this, filename, dirname);
|
||||
} else {
|
||||
result = compiledWrapper.call(this.exports, this.exports, require, this,
|
||||
filename, dirname);
|
||||
}
|
||||
if (depth === 0) stat.cache = null;
|
||||
return result;
|
||||
};
|
||||
|
||||
|
||||
// Native extension for .js
|
||||
Module._extensions['.js'] = function(module, filename) {
|
||||
var content = fs.readFileSync(filename, 'utf8');
|
||||
module._compile(internalModule.stripBOM(content), filename);
|
||||
};
|
||||
|
||||
|
||||
// Native extension for .json
|
||||
Module._extensions['.json'] = function(module, filename) {
|
||||
var content = fs.readFileSync(filename, 'utf8');
|
||||
try {
|
||||
module.exports = JSON.parse(internalModule.stripBOM(content));
|
||||
} catch (err) {
|
||||
err.message = filename + ': ' + err.message;
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
//Native extension for .node
|
||||
Module._extensions['.node'] = function(module, filename) {
|
||||
return process.dlopen(module, path._makeLong(filename));
|
||||
};
|
||||
|
||||
//SyNode
|
||||
Module._extensions[process.platform === 'win32' ? '.dll' : '.so'] = process.binding('modules').loadDll;
|
||||
|
||||
if (experimentalModules) {
|
||||
Module._extensions['.mjs'] = function(module, filename) {
|
||||
throw new errors.Error('ERR_REQUIRE_ESM', filename);
|
||||
};
|
||||
}
|
||||
|
||||
// bootstrap main module.
|
||||
Module.runMain = function() {
|
||||
// Load the main module--the command line argument.
|
||||
Module._load(process.argv[1], null, true);
|
||||
// Handle any nextTicks added in the first tick of the program
|
||||
process._tickCallback();
|
||||
};
|
||||
|
||||
Module._initPaths = function() {
|
||||
const isWindows = process.platform === 'win32';
|
||||
|
||||
var homeDir;
|
||||
if (isWindows) {
|
||||
homeDir = process.env.USERPROFILE;
|
||||
} else {
|
||||
homeDir = process.env.HOME;
|
||||
}
|
||||
|
||||
// $PREFIX/lib/node, where $PREFIX is the root of the Node.js installation.
|
||||
var prefixDir;
|
||||
// process.execPath is $PREFIX/bin/node except on Windows where it is
|
||||
// $PREFIX\node.exe.
|
||||
if (isWindows) {
|
||||
prefixDir = path.resolve(process.execPath, '..');
|
||||
} else {
|
||||
prefixDir = path.resolve(process.execPath, '..', '..');
|
||||
}
|
||||
var paths = [path.resolve(prefixDir, 'lib', 'node')];
|
||||
|
||||
if (homeDir) {
|
||||
paths.unshift(path.resolve(homeDir, '.node_libraries'));
|
||||
paths.unshift(path.resolve(homeDir, '.node_modules'));
|
||||
}
|
||||
|
||||
var nodePath = process.env['NODE_PATH'];
|
||||
if (nodePath) {
|
||||
paths = nodePath.split(path.delimiter).filter(function(path) {
|
||||
return !!path;
|
||||
}).concat(paths);
|
||||
}
|
||||
|
||||
modulePaths = paths;
|
||||
|
||||
// clone as a shallow copy, for introspection.
|
||||
Module.globalPaths = modulePaths.slice(0);
|
||||
};
|
||||
|
||||
Module._preloadModules = function(requests) {
|
||||
if (!Array.isArray(requests))
|
||||
return;
|
||||
|
||||
// Preloaded modules have a dummy parent module which is deemed to exist
|
||||
// in the current working directory. This seeds the search path for
|
||||
// preloaded modules.
|
||||
var parent = new Module('internal/preload', null);
|
||||
try {
|
||||
parent.paths = Module._nodeModulePaths(process.cwd());
|
||||
} catch (e) {
|
||||
if (e.code !== 'ENOENT') {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
for (var n = 0; n < requests.length; n++)
|
||||
parent.require(requests[n]);
|
||||
};
|
||||
|
||||
Module._initPaths();
|
||||
|
||||
// backwards compatibility
|
||||
Module.Module = Module;
|
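A concrete illustration of the lookup-path construction above (posix branch; the win32 branch produces the analogous drive-rooted list):

const Module = require('module');

Module._nodeModulePaths('/home/app/src');
// [ '/home/app/src/node_modules',
//   '/home/app/node_modules',
//   '/home/node_modules',
//   '/node_modules' ]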
5
contrib/mORMot/SyNode/core_modules/node_modules/net.js
generated
vendored
Normal file
5
contrib/mORMot/SyNode/core_modules/node_modules/net.js
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
/**
|
||||
* MPV - pure fake: a stub so require('net') resolves; it exports an empty object.
|
||||
*/
|
||||
|
||||
module.exports = {}
|
53
contrib/mORMot/SyNode/core_modules/node_modules/os.js
generated
vendored
Normal file
53
contrib/mORMot/SyNode/core_modules/node_modules/os.js
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
// Modified by UnityBase core team to be compatible with SyNode
|
||||
|
||||
/**
|
||||
* @module os
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
|
||||
var util = require('util');
|
||||
const {getHostname} = process.binding('os')
|
||||
|
||||
//MPV TODO implement
|
||||
//var binding = process.binding('os');
|
||||
//exports.endianness = binding.getEndianness;
|
||||
//exports.loadavg = binding.getLoadAvg;
|
||||
//exports.uptime = binding.getUptime;
|
||||
//exports.freemem = binding.getFreeMem;
|
||||
//exports.totalmem = binding.getTotalMem;
|
||||
//exports.cpus = binding.getCPUs;
|
||||
//exports.type = binding.getOSType;
|
||||
//exports.release = binding.getOSRelease;
|
||||
//exports.networkInterfaces = binding.getInterfaceAddresses;
|
||||
|
||||
exports.endianness = function() { return 'LE'; };
|
||||
|
||||
exports.arch = function() {
|
||||
return process.arch;
|
||||
};
|
||||
|
||||
exports.platform = function() {
|
||||
return process.platform;
|
||||
};
|
||||
|
||||
exports.tmpdir = function() {
|
||||
return process.env.TMPDIR ||
|
||||
process.env.TMP ||
|
||||
process.env.TEMP ||
|
||||
(process.platform === 'win32' ? 'c:\\windows\\temp' : '/tmp');
|
||||
};
|
||||
|
||||
exports.tmpDir = exports.tmpdir;
|
||||
|
||||
exports.getNetworkInterfaces = util.deprecate(function() {
|
||||
return exports.networkInterfaces();
|
||||
}, 'getNetworkInterfaces is now called `os.networkInterfaces`.');
|
||||
|
||||
exports.EOL = process.platform === 'win32' ? '\r\n' : '\n';
|
||||
|
||||
exports.hostname = function() {
|
||||
let hn = getHostname();
|
||||
return hn.toLowerCase()
|
||||
}
|
||||
exports.hostname[Symbol.toPrimitive] = () => exports.hostname();
|
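A few representative calls against the partial os implementation above:

const os = require('os');

os.arch();        // mirrors process.arch, e.g. 'x64'
os.platform();    // mirrors process.platform, e.g. 'win32' or 'linux'
os.endianness();  // always 'LE' in this implementation
os.EOL;           // '\r\n' on win32, '\n' elsewhere
os.tmpdir();      // TMPDIR/TMP/TEMP, or the platform default directory
os.hostname();    // lower-cased host name from process.binding('os')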
1603
contrib/mORMot/SyNode/core_modules/node_modules/path.js
generated
vendored
Normal file
1603
contrib/mORMot/SyNode/core_modules/node_modules/path.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
256
contrib/mORMot/SyNode/core_modules/node_modules/polyfill/WindowTimer.js
generated
vendored
Normal file
256
contrib/mORMot/SyNode/core_modules/node_modules/polyfill/WindowTimer.js
generated
vendored
Normal file
@@ -0,0 +1,256 @@
|
||||
/* Implementation of HTML Timers (setInterval/setTimeout) based on sleep.
|
||||
* @license MIT
|
||||
*
|
||||
* Copyright 2012 Kevin Locke <kevin@kevinlocke.name>
|
||||
* Modified by UnityBase team - added priority to realise the setImmediate
|
||||
*/
|
||||
/*jslint bitwise: true, evil: true */
|
||||
|
||||
/**
|
||||
* Adds methods to implement the HTML5 WindowTimers interface on a given
|
||||
* object.
|
||||
*
|
||||
* Adds the following methods:
|
||||
*
|
||||
* - clearInterval
|
||||
* - clearTimeout
|
||||
* - setInterval
|
||||
* - setTimeout
|
||||
*
|
||||
* See http://www.whatwg.org/specs/web-apps/current-work/multipage/timers.html
|
||||
* for the complete specification of these methods.
|
||||
*
|
||||
* @module WindowTimer
|
||||
*/
|
||||
var WindowTimer = {};
|
||||
|
||||
/**
|
||||
* @method makeWindowTimer
|
||||
*
|
||||
* @param {Object} target Object to which the methods should be added.
|
||||
* @param {Function} sleep A function which sleeps for a specified number of
|
||||
* milliseconds.
|
||||
* @return {Function} The function which runs the scheduled timers.
|
||||
*/
|
||||
function makeWindowTimer(target, sleep) {
|
||||
"use strict";
|
||||
|
||||
var counter = 1,
|
||||
inCallback = false,
|
||||
// Map handle -> timer
|
||||
timersByHandle = {},
|
||||
// Min-heap of timers by time then handle, index 0 unused
|
||||
timersByTime = [ null ];
|
||||
|
||||
/** Compares timers based on scheduled time and handle. */
|
||||
function timerCompare(t1, t2) {
|
||||
// Note: Only need less-than for our uses
|
||||
return t1.priority < t2.priority ? -1 : (t1.priority === t2.priority ?
|
||||
(t1.time < t2.time ? -1 :
|
||||
(t1.time === t2.time && t1.handle < t2.handle ? -1 : 0)) : 0);
|
||||
}
|
||||
|
||||
/** Fix the heap invariant which may be violated at a given index */
|
||||
function heapFixDown(heap, i, lesscmp) {
|
||||
var j, tmp;
|
||||
|
||||
j = i * 2;
|
||||
while (j < heap.length) {
|
||||
if (j + 1 < heap.length &&
|
||||
lesscmp(heap[j + 1], heap[j]) < 0) {
|
||||
j = j + 1;
|
||||
}
|
||||
|
||||
if (lesscmp(heap[i], heap[j]) < 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
tmp = heap[j];
|
||||
heap[j] = heap[i];
|
||||
heap[i] = tmp;
|
||||
i = j;
|
||||
j = i * 2;
|
||||
}
|
||||
}
|
||||
|
||||
/** Fix the heap invariant which may be violated at a given index */
|
||||
function heapFixUp(heap, i, lesscmp) {
|
||||
var j, tmp;
|
||||
while (i > 1) {
|
||||
j = i >> 1; // Integer div by 2
|
||||
|
||||
if (lesscmp(heap[j], heap[i]) < 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
tmp = heap[j];
|
||||
heap[j] = heap[i];
|
||||
heap[i] = tmp;
|
||||
i = j;
|
||||
}
|
||||
}
|
||||
|
||||
/** Remove the timer element from the heap */
|
||||
function heapPop(heap, lesscmp, timer) {
|
||||
for (let index = 1; index < heap.length - 1; index++) {
|
||||
if (heap[index] && heap[index].handle === timer.handle) {
|
||||
heap[index] = heap[heap.length - 1];
|
||||
}
|
||||
}
|
||||
//heap[1] = heap[heap.length - 1];
|
||||
heap.pop();
|
||||
heapFixDown(heap, 1, lesscmp);
|
||||
}
|
||||
|
||||
/** Create a timer and schedule code to run at a given time */
|
||||
function addTimer(code, delay, repeat, argsIfFn, priority) {
|
||||
var handle, timer;
|
||||
|
||||
if (typeof code !== "function") {
|
||||
code = String(code);
|
||||
argsIfFn = null;
|
||||
}
|
||||
|
||||
delay = Number(delay) || 0;
|
||||
if (inCallback) {
|
||||
delay = Math.max(delay, 4);
|
||||
}
|
||||
// Note: Must set handle after argument conversion to properly
|
||||
// handle conformance test in HTML5 spec.
|
||||
handle = counter;
|
||||
counter += 1;
|
||||
|
||||
timer = {
|
||||
args: argsIfFn,
|
||||
cancel: false,
|
||||
code: code,
|
||||
handle: handle,
|
||||
repeat: repeat ? Math.max(delay, 4) : 0,
|
||||
time: new Date().getTime() + delay,
|
||||
priority: priority || 0
|
||||
};
|
||||
|
||||
timersByHandle[handle] = timer;
|
||||
timersByTime.push(timer);
|
||||
heapFixUp(timersByTime, timersByTime.length - 1, timerCompare);
|
||||
|
||||
return handle;
|
||||
}
|
||||
|
||||
/** Cancel an existing timer */
|
||||
function cancelTimer(handle, repeat) {
|
||||
var timer;
|
||||
|
||||
if (timersByHandle.hasOwnProperty(handle)) {
|
||||
timer = timersByHandle[handle];
|
||||
if (repeat === (timer.repeat > 0)) {
|
||||
timer.cancel = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function clearInterval(handle) {
|
||||
cancelTimer(handle, true);
|
||||
}
|
||||
target.clearInterval = clearInterval;
|
||||
|
||||
function clearTimeout(handle) {
|
||||
cancelTimer(handle, false);
|
||||
}
|
||||
target.clearTimeout = clearTimeout;
|
||||
|
||||
function setInterval(code, delay) {
|
||||
return addTimer(
|
||||
code,
|
||||
delay,
|
||||
true,
|
||||
Array.prototype.slice.call(arguments, 2)
|
||||
);
|
||||
}
|
||||
target.setInterval = setInterval;
|
||||
|
||||
function setTimeout(code, delay) {
|
||||
return addTimer(
|
||||
code,
|
||||
delay,
|
||||
false,
|
||||
Array.prototype.slice.call(arguments, 2),
|
||||
0
|
||||
);
|
||||
}
|
||||
target.setTimeout = setTimeout;
|
||||
|
||||
function setTimeoutWithPriority(code, delay, priority) {
|
||||
return addTimer(
|
||||
code,
|
||||
delay,
|
||||
false,
|
||||
Array.prototype.slice.call(arguments, 3),
|
||||
priority
|
||||
);
|
||||
}
|
||||
timerLoop.setTimeoutWithPriority = setTimeoutWithPriority;
|
||||
|
||||
function timerLoop(nonblocking) {
|
||||
// on the way out, don't bother. it won't get fired anyway.
|
||||
if (process._exiting)
|
||||
return;
|
||||
|
||||
var now, timer;
|
||||
|
||||
// Note: index 0 unused in timersByTime
|
||||
while (timersByTime.length > 1) {
|
||||
timer = timersByTime[1];
|
||||
|
||||
if (timer.cancel) {
|
||||
delete timersByHandle[timer.handle];
|
||||
heapPop(timersByTime, timerCompare, timer);
|
||||
} else {
|
||||
now = new Date().getTime();
|
||||
if (timer.time <= now) {
|
||||
inCallback = true;
|
||||
try {
|
||||
if (typeof timer.code === "function") {
|
||||
timer.code.apply(undefined, timer.args);
|
||||
} else {
|
||||
eval(timer.code);
|
||||
}
|
||||
} finally {
|
||||
inCallback = false;
|
||||
}
|
||||
|
||||
if (timer.repeat > 0 && !timer.cancel) {
|
||||
timer.time += timer.repeat;
|
||||
heapFixDown(timersByTime, 1, timerCompare);
|
||||
} else {
|
||||
delete timersByHandle[timer.handle];
|
||||
heapPop(timersByTime, timerCompare, timer);
|
||||
}
|
||||
} else if (!nonblocking) {
|
||||
sleep(timer.time - now);
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
return timerLoop;
|
||||
}
|
||||
|
||||
if (typeof exports === "object") {
|
||||
exports.makeWindowTimer = makeWindowTimer;
|
||||
}
|
||||
|
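A minimal usage sketch for the timer polyfill above; the require path and the blocking sleep callback are assumptions, since both depend on how SyNode wires the module in:

const { makeWindowTimer } = require('polyfill/WindowTimer');   // assumed path

// makeWindowTimer installs setTimeout/setInterval/clearTimeout/clearInterval
// on `target` and returns the loop that actually fires scheduled callbacks.
const runTimers = makeWindowTimer(global, function sleep(ms) {
  const end = Date.now() + ms;     // busy-wait stands in for a host sleep
  while (Date.now() < end) { /* spin */ }
});

global.setTimeout(() => console.log('fired after ~10 ms'), 10);
runTimers();       // blocking: runs until the timer queue is empty
runTimers(true);   // non-blocking: returns true if timers are still pending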
510
contrib/mORMot/SyNode/core_modules/node_modules/punycode.js
generated
vendored
Normal file
510
contrib/mORMot/SyNode/core_modules/node_modules/punycode.js
generated
vendored
Normal file
@@ -0,0 +1,510 @@
|
||||
/*! http://mths.be/punycode v1.2.0 by @mathias */
|
||||
;(function(root) {
|
||||
|
||||
/**
|
||||
* The `punycode` object.
|
||||
* @name punycode
|
||||
* @type Object
|
||||
*/
|
||||
var punycode,
|
||||
|
||||
/** Detect free variables `define`, `exports`, `module` and `require` */
|
||||
freeDefine = typeof define == 'function' && typeof define.amd == 'object' &&
|
||||
define.amd && define,
|
||||
freeExports = typeof exports == 'object' && exports,
|
||||
freeModule = typeof module == 'object' && module,
|
||||
freeRequire = typeof require == 'function' && require,
|
||||
|
||||
/** Highest positive signed 32-bit float value */
|
||||
maxInt = 2147483647, // aka. 0x7FFFFFFF or 2^31-1
|
||||
|
||||
/** Bootstring parameters */
|
||||
base = 36,
|
||||
tMin = 1,
|
||||
tMax = 26,
|
||||
skew = 38,
|
||||
damp = 700,
|
||||
initialBias = 72,
|
||||
initialN = 128, // 0x80
|
||||
delimiter = '-', // '\x2D'
|
||||
|
||||
/** Regular expressions */
|
||||
regexPunycode = /^xn--/,
|
||||
regexNonASCII = /[^ -~]/, // unprintable ASCII chars + non-ASCII chars
|
||||
regexSeparators = /\x2E|\u3002|\uFF0E|\uFF61/g, // RFC 3490 separators
|
||||
|
||||
/** Error messages */
|
||||
errors = {
|
||||
'overflow': 'Overflow: input needs wider integers to process',
|
||||
'not-basic': 'Illegal input >= 0x80 (not a basic code point)',
|
||||
'invalid-input': 'Invalid input'
|
||||
},
|
||||
|
||||
/** Convenience shortcuts */
|
||||
baseMinusTMin = base - tMin,
|
||||
floor = Math.floor,
|
||||
stringFromCharCode = String.fromCharCode,
|
||||
|
||||
/** Temporary variable */
|
||||
key;
|
||||
|
||||
/*--------------------------------------------------------------------------*/
|
||||
|
||||
/**
|
||||
* A generic error utility function.
|
||||
* @private
|
||||
* @param {String} type The error type.
|
||||
* @returns {Error} Throws a `RangeError` with the applicable error message.
|
||||
*/
|
||||
function error(type) {
|
||||
throw RangeError(errors[type]);
|
||||
}
|
||||
|
||||
/**
|
||||
* A generic `Array#map` utility function.
|
||||
* @private
|
||||
* @param {Array} array The array to iterate over.
|
||||
* @param {Function} callback The function that gets called for every array
|
||||
* item.
|
||||
* @returns {Array} A new array of values returned by the callback function.
|
||||
*/
|
||||
function map(array, fn) {
|
||||
var length = array.length;
|
||||
while (length--) {
|
||||
array[length] = fn(array[length]);
|
||||
}
|
||||
return array;
|
||||
}
|
||||
|
||||
/**
|
||||
* A simple `Array#map`-like wrapper to work with domain name strings.
|
||||
* @private
|
||||
* @param {String} domain The domain name.
|
||||
* @param {Function} callback The function that gets called for every
|
||||
* character.
|
||||
* @returns {Array} A new string of characters returned by the callback
|
||||
* function.
|
||||
*/
|
||||
function mapDomain(string, fn) {
|
||||
return map(string.split(regexSeparators), fn).join('.');
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an array containing the decimal code points of each Unicode
|
||||
* character in the string. While JavaScript uses UCS-2 internally,
|
||||
* this function will convert a pair of surrogate halves (each of which
|
||||
* UCS-2 exposes as separate characters) into a single code point,
|
||||
* matching UTF-16.
|
||||
* @see `punycode.ucs2.encode`
|
||||
* @see <http://mathiasbynens.be/notes/javascript-encoding>
|
||||
* @memberOf punycode.ucs2
|
||||
* @name decode
|
||||
* @param {String} string The Unicode input string (UCS-2).
|
||||
* @returns {Array} The new array of code points.
|
||||
*/
|
||||
function ucs2decode(string) {
|
||||
var output = [],
|
||||
counter = 0,
|
||||
length = string.length,
|
||||
value,
|
||||
extra;
|
||||
while (counter < length) {
|
||||
value = string.charCodeAt(counter++);
|
||||
if ((value & 0xF800) == 0xD800 && counter < length) {
|
||||
// high surrogate, and there is a next character
|
||||
extra = string.charCodeAt(counter++);
|
||||
if ((extra & 0xFC00) == 0xDC00) { // low surrogate
|
||||
output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000);
|
||||
} else {
|
||||
output.push(value, extra);
|
||||
}
|
||||
} else {
|
||||
output.push(value);
|
||||
}
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a string based on an array of decimal code points.
|
||||
* @see `punycode.ucs2.decode`
|
||||
* @memberOf punycode.ucs2
|
||||
* @name encode
|
||||
* @param {Array} codePoints The array of decimal code points.
|
||||
* @returns {String} The new Unicode string (UCS-2).
|
||||
*/
|
||||
function ucs2encode(array) {
|
||||
return map(array, function(value) {
|
||||
var output = '';
|
||||
if (value > 0xFFFF) {
|
||||
value -= 0x10000;
|
||||
output += stringFromCharCode(value >>> 10 & 0x3FF | 0xD800);
|
||||
value = 0xDC00 | value & 0x3FF;
|
||||
}
|
||||
output += stringFromCharCode(value);
|
||||
return output;
|
||||
}).join('');
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a basic code point into a digit/integer.
|
||||
* @see `digitToBasic()`
|
||||
* @private
|
||||
* @param {Number} codePoint The basic (decimal) code point.
|
||||
* @returns {Number} The numeric value of a basic code point (for use in
|
||||
* representing integers) in the range `0` to `base - 1`, or `base` if
|
||||
* the code point does not represent a value.
|
||||
*/
|
||||
function basicToDigit(codePoint) {
|
||||
return codePoint - 48 < 10
|
||||
? codePoint - 22
|
||||
: codePoint - 65 < 26
|
||||
? codePoint - 65
|
||||
: codePoint - 97 < 26
|
||||
? codePoint - 97
|
||||
: base;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a digit/integer into a basic code point.
|
||||
* @see `basicToDigit()`
|
||||
* @private
|
||||
* @param {Number} digit The numeric value of a basic code point.
|
||||
* @returns {Number} The basic code point whose value (when used for
|
||||
* representing integers) is `digit`, which needs to be in the range
|
||||
* `0` to `base - 1`. If `flag` is non-zero, the uppercase form is
|
||||
* used; else, the lowercase form is used. The behavior is undefined
|
||||
* if flag is non-zero and `digit` has no uppercase form.
|
||||
*/
|
||||
function digitToBasic(digit, flag) {
|
||||
// 0..25 map to ASCII a..z or A..Z
|
||||
// 26..35 map to ASCII 0..9
|
||||
return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5);
|
||||
}
|
||||
|
||||
/**
|
||||
* Bias adaptation function as per section 3.4 of RFC 3492.
|
||||
* http://tools.ietf.org/html/rfc3492#section-3.4
|
||||
* @private
|
||||
*/
|
||||
function adapt(delta, numPoints, firstTime) {
|
||||
var k = 0;
|
||||
delta = firstTime ? floor(delta / damp) : delta >> 1;
|
||||
delta += floor(delta / numPoints);
|
||||
for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) {
|
||||
delta = floor(delta / baseMinusTMin);
|
||||
}
|
||||
return floor(k + (baseMinusTMin + 1) * delta / (delta + skew));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a basic code point to lowercase if `flag` is falsy, or to
|
||||
* uppercase if `flag` is truthy. The code point is unchanged if it's
|
||||
* caseless. The behavior is undefined if `codePoint` is not a basic code
|
||||
* point.
|
||||
* @private
|
||||
* @param {Number} codePoint The numeric value of a basic code point.
|
||||
* @returns {Number} The resulting basic code point.
|
||||
*/
|
||||
function encodeBasic(codePoint, flag) {
|
||||
codePoint -= (codePoint - 97 < 26) << 5;
|
||||
return codePoint + (!flag && codePoint - 65 < 26) << 5;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a Punycode string of ASCII code points to a string of Unicode
|
||||
* code points.
|
||||
* @memberOf punycode
|
||||
* @param {String} input The Punycode string of ASCII code points.
|
||||
* @returns {String} The resulting string of Unicode code points.
|
||||
*/
|
||||
function decode(input) {
|
||||
// Don't use UCS-2
|
||||
var output = [],
|
||||
inputLength = input.length,
|
||||
out,
|
||||
i = 0,
|
||||
n = initialN,
|
||||
bias = initialBias,
|
||||
basic,
|
||||
j,
|
||||
index,
|
||||
oldi,
|
||||
w,
|
||||
k,
|
||||
digit,
|
||||
t,
|
||||
length,
|
||||
/** Cached calculation results */
|
||||
baseMinusT;
|
||||
|
||||
// Handle the basic code points: let `basic` be the number of input code
|
||||
// points before the last delimiter, or `0` if there is none, then copy
|
||||
// the first basic code points to the output.
|
||||
|
||||
basic = input.lastIndexOf(delimiter);
|
||||
if (basic < 0) {
|
||||
basic = 0;
|
||||
}
|
||||
|
||||
for (j = 0; j < basic; ++j) {
|
||||
// if it's not a basic code point
|
||||
if (input.charCodeAt(j) >= 0x80) {
|
||||
error('not-basic');
|
||||
}
|
||||
output.push(input.charCodeAt(j));
|
||||
}
|
||||
|
||||
// Main decoding loop: start just after the last delimiter if any basic code
|
||||
// points were copied; start at the beginning otherwise.
|
||||
|
||||
for (index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) {
|
||||
|
||||
// `index` is the index of the next character to be consumed.
|
||||
// Decode a generalized variable-length integer into `delta`,
|
||||
// which gets added to `i`. The overflow checking is easier
|
||||
// if we increase `i` as we go, then subtract off its starting
|
||||
// value at the end to obtain `delta`.
|
||||
for (oldi = i, w = 1, k = base; /* no condition */; k += base) {
|
||||
|
||||
if (index >= inputLength) {
|
||||
error('invalid-input');
|
||||
}
|
||||
|
||||
digit = basicToDigit(input.charCodeAt(index++));
|
||||
|
||||
if (digit >= base || digit > floor((maxInt - i) / w)) {
|
||||
error('overflow');
|
||||
}
|
||||
|
||||
i += digit * w;
|
||||
t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias);
|
||||
|
||||
if (digit < t) {
|
||||
break;
|
||||
}
|
||||
|
||||
baseMinusT = base - t;
|
||||
if (w > floor(maxInt / baseMinusT)) {
|
||||
error('overflow');
|
||||
}
|
||||
|
||||
w *= baseMinusT;
|
||||
|
||||
}
|
||||
|
||||
out = output.length + 1;
|
||||
bias = adapt(i - oldi, out, oldi == 0);
|
||||
|
||||
// `i` was supposed to wrap around from `out` to `0`,
|
||||
// incrementing `n` each time, so we'll fix that now:
|
||||
if (floor(i / out) > maxInt - n) {
|
||||
error('overflow');
|
||||
}
|
||||
|
||||
n += floor(i / out);
|
||||
i %= out;
|
||||
|
||||
// Insert `n` at position `i` of the output
|
||||
output.splice(i++, 0, n);
|
||||
|
||||
}
|
||||
|
||||
return ucs2encode(output);
|
||||
}
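// Illustrative usage (a sketch, matching the punycode.js documentation): the
// ASCII digits after the last delimiter are decoded back into non-basic code points.
//   decode('maana-pta'); // 'mañana'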
|
||||
|
||||
/**
|
||||
* Converts a string of Unicode code points to a Punycode string of ASCII
|
||||
* code points.
|
||||
* @memberOf punycode
|
||||
* @param {String} input The string of Unicode code points.
|
||||
* @returns {String} The resulting Punycode string of ASCII code points.
|
||||
*/
|
||||
function encode(input) {
|
||||
var n,
|
||||
delta,
|
||||
handledCPCount,
|
||||
basicLength,
|
||||
bias,
|
||||
j,
|
||||
m,
|
||||
q,
|
||||
k,
|
||||
t,
|
||||
currentValue,
|
||||
output = [],
|
||||
/** `inputLength` will hold the number of code points in `input`. */
|
||||
inputLength,
|
||||
/** Cached calculation results */
|
||||
handledCPCountPlusOne,
|
||||
baseMinusT,
|
||||
qMinusT;
|
||||
|
||||
// Convert the input in UCS-2 to Unicode
|
||||
input = ucs2decode(input);
|
||||
|
||||
// Cache the length
|
||||
inputLength = input.length;
|
||||
|
||||
// Initialize the state
|
||||
n = initialN;
|
||||
delta = 0;
|
||||
bias = initialBias;
|
||||
|
||||
// Handle the basic code points
|
||||
for (j = 0; j < inputLength; ++j) {
|
||||
currentValue = input[j];
|
||||
if (currentValue < 0x80) {
|
||||
output.push(stringFromCharCode(currentValue));
|
||||
}
|
||||
}
|
||||
|
||||
handledCPCount = basicLength = output.length;
|
||||
|
||||
// `handledCPCount` is the number of code points that have been handled;
|
||||
// `basicLength` is the number of basic code points.
|
||||
|
||||
// Finish the basic string - if it is not empty - with a delimiter
|
||||
if (basicLength) {
|
||||
output.push(delimiter);
|
||||
}
|
||||
|
||||
// Main encoding loop:
|
||||
while (handledCPCount < inputLength) {
|
||||
|
||||
// All non-basic code points < n have been handled already. Find the next
|
||||
// larger one:
|
||||
for (m = maxInt, j = 0; j < inputLength; ++j) {
|
||||
currentValue = input[j];
|
||||
if (currentValue >= n && currentValue < m) {
|
||||
m = currentValue;
|
||||
}
|
||||
}
|
||||
|
||||
// Increase `delta` enough to advance the decoder's <n,i> state to <m,0>,
|
||||
// but guard against overflow
|
||||
handledCPCountPlusOne = handledCPCount + 1;
|
||||
if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) {
|
||||
error('overflow');
|
||||
}
|
||||
|
||||
delta += (m - n) * handledCPCountPlusOne;
|
||||
n = m;
|
||||
|
||||
for (j = 0; j < inputLength; ++j) {
|
||||
currentValue = input[j];
|
||||
|
||||
if (currentValue < n && ++delta > maxInt) {
|
||||
error('overflow');
|
||||
}
|
||||
|
||||
if (currentValue == n) {
|
||||
// Represent delta as a generalized variable-length integer
|
||||
for (q = delta, k = base; /* no condition */; k += base) {
|
||||
t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias);
|
||||
if (q < t) {
|
||||
break;
|
||||
}
|
||||
qMinusT = q - t;
|
||||
baseMinusT = base - t;
|
||||
output.push(
|
||||
stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0))
|
||||
);
|
||||
q = floor(qMinusT / baseMinusT);
|
||||
}
|
||||
|
||||
output.push(stringFromCharCode(digitToBasic(q, 0)));
|
||||
bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength);
|
||||
delta = 0;
|
||||
++handledCPCount;
|
||||
}
|
||||
}
|
||||
|
||||
++delta;
|
||||
++n;
|
||||
|
||||
}
|
||||
return output.join('');
|
||||
}
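// Illustrative usage (a sketch, the inverse of decode() above): basic code points
// are copied verbatim, the non-basic 'ñ' is encoded after the delimiter.
//   encode('mañana'); // 'maana-pta'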
|
||||
|
||||
/**
|
||||
* Converts a Punycode string representing a domain name to Unicode. Only the
|
||||
* Punycoded parts of the domain name will be converted, i.e. it doesn't
|
||||
* matter if you call it on a string that has already been converted to
|
||||
* Unicode.
|
||||
* @memberOf punycode
|
||||
* @param {String} domain The Punycode domain name to convert to Unicode.
|
||||
* @returns {String} The Unicode representation of the given Punycode
|
||||
* string.
|
||||
*/
|
||||
function toUnicode(domain) {
|
||||
return mapDomain(domain, function(string) {
|
||||
return regexPunycode.test(string)
|
||||
? decode(string.slice(4).toLowerCase())
|
||||
: string;
|
||||
});
|
||||
}
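// Illustrative usage (a sketch): only labels starting with the Punycode prefix
// are decoded; labels that are already Unicode pass through unchanged.
//   toUnicode('xn--maana-pta.com'); // 'mañana.com'
//   toUnicode('example.com');       // 'example.com'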
|
||||
|
||||
/**
|
||||
* Converts a Unicode string representing a domain name to Punycode. Only the
|
||||
* non-ASCII parts of the domain name will be converted, i.e. it doesn't
|
||||
* matter if you call it with a domain that's already in ASCII.
|
||||
* @memberOf punycode
|
||||
* @param {String} domain The domain name to convert, as a Unicode string.
|
||||
* @returns {String} The Punycode representation of the given domain name.
|
||||
*/
|
||||
function toASCII(domain) {
|
||||
return mapDomain(domain, function(string) {
|
||||
return regexNonASCII.test(string)
|
||||
? 'xn--' + encode(string)
|
||||
: string;
|
||||
});
|
||||
}
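// Illustrative usage (a sketch): only labels containing non-ASCII characters
// get the 'xn--' prefix and are encoded.
//   toASCII('mañana.com');  // 'xn--maana-pta.com'
//   toASCII('example.com'); // 'example.com'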
|
||||
|
||||
/*--------------------------------------------------------------------------*/
|
||||
|
||||
/** Define the public API */
|
||||
punycode = {
|
||||
/**
|
||||
* A string representing the current Punycode.js version number.
|
||||
* @memberOf punycode
|
||||
* @type String
|
||||
*/
|
||||
'version': '1.2.0',
|
||||
/**
|
||||
* An object of methods to convert from JavaScript's internal character
|
||||
* representation (UCS-2) to decimal Unicode code points, and back.
|
||||
* @see <http://mathiasbynens.be/notes/javascript-encoding>
|
||||
* @memberOf punycode
|
||||
* @type Object
|
||||
*/
|
||||
'ucs2': {
|
||||
'decode': ucs2decode,
|
||||
'encode': ucs2encode
|
||||
},
|
||||
'decode': decode,
|
||||
'encode': encode,
|
||||
'toASCII': toASCII,
|
||||
'toUnicode': toUnicode
|
||||
};
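// Illustrative usage of the UCS-2 helpers (a sketch, matching the punycode.js
// documentation): a surrogate pair becomes a single code point and back.
//   punycode.ucs2.decode('\uD834\uDF06'); // [0x1D306]
//   punycode.ucs2.encode([0x1D306]);      // '\uD834\uDF06'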
|
||||
|
||||
/** Expose `punycode` */
|
||||
if (freeExports) {
|
||||
if (freeModule && freeModule.exports == freeExports) {
|
||||
// in Node.js or Ringo 0.8+
|
||||
freeModule.exports = punycode;
|
||||
} else {
|
||||
// in Narwhal or Ringo 0.7-
|
||||
for (key in punycode) {
|
||||
punycode.hasOwnProperty(key) && (freeExports[key] = punycode[key]);
|
||||
}
|
||||
}
|
||||
} else if (freeDefine) {
|
||||
// via curl.js or RequireJS
|
||||
define('punycode', punycode);
|
||||
} else {
|
||||
// in a browser or Rhino
|
||||
root.punycode = punycode;
|
||||
}
|
||||
|
||||
}(this));
|
254
contrib/mORMot/SyNode/core_modules/node_modules/querystring.js
generated
vendored
Normal file
254
contrib/mORMot/SyNode/core_modules/node_modules/querystring.js
generated
vendored
Normal file
@@ -0,0 +1,254 @@
|
||||
// Copyright Joyent, Inc. and other Node contributors.
|
||||
// Modified by UnityBase core team to be compatible with SyNode
|
||||
// Query String Utilities
|
||||
|
||||
/**
|
||||
* @module querystring
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
|
||||
var QueryString = exports;
|
||||
|
||||
/**
|
||||
* This module provides utilities for dealing with query strings. Call `require('querystring')` to use it. This is a port of the NodeJS <a href="http://nodejs.org/api/querystring.html">querystring</a> module.
|
||||
* Small sample:
|
||||
*
|
||||
* var querystring = require('querystring');
|
||||
* querystring.stringify({param1: 'value1', param2: ['arr1', 'arr2'], paramEmpty: '' })
|
||||
* // returns 'param1=value1&param2=arr1&param2=arr2&paramEmpty='
|
||||
*
|
||||
* @module querystring
|
||||
*/
|
||||
|
||||
// If obj.hasOwnProperty has been overridden, then calling
|
||||
// obj.hasOwnProperty(prop) will break.
|
||||
// See: https://github.com/joyent/node/issues/1707
|
||||
function hasOwnProperty(obj, prop) {
|
||||
return Object.prototype.hasOwnProperty.call(obj, prop);
|
||||
}
|
||||
|
||||
|
||||
function charCode(c) {
|
||||
return c.charCodeAt(0);
|
||||
}
|
||||
|
||||
|
||||
// a safe fast alternative to decodeURIComponent
|
||||
QueryString.unescapeBuffer = function(s, decodeSpaces) {
|
||||
var out = new Buffer(s.length);
|
||||
var state = 'CHAR'; // states: CHAR, HEX0, HEX1
|
||||
var n, m, hexchar;
|
||||
|
||||
for (var inIndex = 0, outIndex = 0; inIndex <= s.length; inIndex++) {
|
||||
var c = s.charCodeAt(inIndex);
|
||||
switch (state) {
|
||||
case 'CHAR':
|
||||
//noinspection FallThroughInSwitchStatementJS
|
||||
switch (c) {
|
||||
case charCode('%'):
|
||||
n = 0;
|
||||
m = 0;
|
||||
state = 'HEX0';
|
||||
break;
|
||||
case charCode('+'):
|
||||
if (decodeSpaces) c = charCode(' ');
|
||||
// pass thru
|
||||
default:
|
||||
out[outIndex++] = c;
|
||||
break;
|
||||
}
|
||||
break;
|
||||
|
||||
case 'HEX0':
|
||||
state = 'HEX1';
|
||||
hexchar = c;
|
||||
if (charCode('0') <= c && c <= charCode('9')) {
|
||||
n = c - charCode('0');
|
||||
} else if (charCode('a') <= c && c <= charCode('f')) {
|
||||
n = c - charCode('a') + 10;
|
||||
} else if (charCode('A') <= c && c <= charCode('F')) {
|
||||
n = c - charCode('A') + 10;
|
||||
} else {
|
||||
out[outIndex++] = charCode('%');
|
||||
out[outIndex++] = c;
|
||||
state = 'CHAR';
|
||||
break;
|
||||
}
|
||||
break;
|
||||
|
||||
case 'HEX1':
|
||||
state = 'CHAR';
|
||||
if (charCode('0') <= c && c <= charCode('9')) {
|
||||
m = c - charCode('0');
|
||||
} else if (charCode('a') <= c && c <= charCode('f')) {
|
||||
m = c - charCode('a') + 10;
|
||||
} else if (charCode('A') <= c && c <= charCode('F')) {
|
||||
m = c - charCode('A') + 10;
|
||||
} else {
|
||||
out[outIndex++] = charCode('%');
|
||||
out[outIndex++] = hexchar;
|
||||
out[outIndex++] = c;
|
||||
break;
|
||||
}
|
||||
out[outIndex++] = 16 * n + m;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO support returning arbitrary buffers.
|
||||
|
||||
return out.slice(0, outIndex - 1);
|
||||
};
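// Illustrative usage (a sketch): unescape() below wraps unescapeBuffer() and
// converts the resulting buffer to a string.
//   QueryString.unescape('a%20b', false); // 'a b'
//   QueryString.unescape('a+b', true);    // 'a b' – '+' becomes a space only when decodeSpaces is true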
|
||||
|
||||
/**
|
||||
* The unescape function used by querystring.parse, provided so that it could be overridden if necessary.
|
||||
*
|
||||
* @param s
|
||||
* @param decodeSpaces
|
||||
* @return {*}
|
||||
*/
|
||||
QueryString.unescape = function(s, decodeSpaces) {
|
||||
return QueryString.unescapeBuffer(s, decodeSpaces).toString();
|
||||
};
|
||||
|
||||
/**
|
||||
* The escape function used by querystring.stringify, provided so that it could be overridden if necessary.
|
||||
* @param str
|
||||
* @return {string}
|
||||
*/
|
||||
QueryString.escape = function(str) {
|
||||
return encodeURIComponent(str);
|
||||
};
|
||||
|
||||
var stringifyPrimitive = function(v) {
|
||||
switch (typeof v) {
|
||||
case 'string':
|
||||
return v;
|
||||
|
||||
case 'boolean':
|
||||
return v ? 'true' : 'false';
|
||||
|
||||
case 'number':
|
||||
return isFinite(v) ? v : '';
|
||||
|
||||
default:
|
||||
return '';
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Serialize an object to a query string. Optionally override the default separator ('&') and assignment ('=') characters.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* querystring.stringify({ foo: 'bar', baz: ['qux', 'quux'], corge: '' })
|
||||
* // returns
|
||||
* 'foo=bar&baz=qux&baz=quux&corge='
|
||||
*
|
||||
* querystring.stringify({foo: 'bar', baz: 'qux'}, ';', ':')
|
||||
* // returns
|
||||
* 'foo:bar;baz:qux'
|
||||
*
|
||||
* @param {Object} obj
|
||||
* @param {String} [sep="&"]
|
||||
* @param {String} [eq="="]
|
||||
*/
|
||||
QueryString.stringify = QueryString.encode = function(obj, sep, eq, name) {
|
||||
sep = sep || '&';
|
||||
eq = eq || '=';
|
||||
if (obj === null) {
|
||||
obj = undefined;
|
||||
}
|
||||
|
||||
if (typeof obj === 'object') {
|
||||
return Object.keys(obj).map(function(k) {
|
||||
var ks = QueryString.escape(stringifyPrimitive(k)) + eq;
|
||||
if (Array.isArray(obj[k])) {
|
||||
return obj[k].map(function(v) {
|
||||
return ks + QueryString.escape(stringifyPrimitive(v));
|
||||
}).join(sep);
|
||||
} else {
|
||||
return ks + QueryString.escape(stringifyPrimitive(obj[k]));
|
||||
}
|
||||
}).join(sep);
|
||||
|
||||
}
|
||||
|
||||
if (!name) return '';
|
||||
return QueryString.escape(stringifyPrimitive(name)) + eq +
|
||||
QueryString.escape(stringifyPrimitive(obj));
|
||||
};
|
||||
|
||||
/**
|
||||
* Deserialize a query string to an object. Optionally override the default separator ('&') and assignment ('=') characters.
|
||||
*
|
||||
* Options object may contain maxKeys property (equal to 1000 by default), it'll be used to limit processed keys. Set it to 0 to remove key count limitation.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* querystring.parse('foo=bar&baz=qux&baz=quux&corge')
|
||||
* // returns
|
||||
* { foo: 'bar', baz: ['qux', 'quux'], corge: '' }
|
||||
*
|
||||
* @method parse
|
||||
* @param {Object} obj
|
||||
* @param {String} [sep="&"]
|
||||
* @param {String} [eq="="]
|
||||
* @param {Object} [options]
|
||||
* @param {Number} [options.maxKeys=1000]
|
||||
*/
|
||||
QueryString.parse = QueryString.decode = function(qs, sep, eq, options) {
|
||||
sep = sep || '&';
|
||||
eq = eq || '=';
|
||||
var obj = {};
|
||||
|
||||
if (typeof qs !== 'string' || qs.length === 0) {
|
||||
return obj;
|
||||
}
|
||||
|
||||
var regexp = /\+/g;
|
||||
qs = qs.split(sep);
|
||||
|
||||
var maxKeys = 1000;
|
||||
if (options && typeof options.maxKeys === 'number') {
|
||||
maxKeys = options.maxKeys;
|
||||
}
|
||||
|
||||
var len = qs.length;
|
||||
// maxKeys <= 0 means that we should not limit keys count
|
||||
if (maxKeys > 0 && len > maxKeys) {
|
||||
len = maxKeys;
|
||||
}
|
||||
|
||||
for (var i = 0; i < len; ++i) {
|
||||
var x = qs[i].replace(regexp, '%20'),
|
||||
idx = x.indexOf(eq),
|
||||
kstr, vstr, k, v;
|
||||
|
||||
if (idx >= 0) {
|
||||
kstr = x.substr(0, idx);
|
||||
vstr = x.substr(idx + 1);
|
||||
} else {
|
||||
kstr = x;
|
||||
vstr = '';
|
||||
}
|
||||
|
||||
try {
|
||||
k = decodeURIComponent(kstr);
|
||||
v = decodeURIComponent(vstr);
|
||||
} catch (e) {
|
||||
k = QueryString.unescape(kstr, true);
|
||||
v = QueryString.unescape(vstr, true);
|
||||
}
|
||||
|
||||
if (!hasOwnProperty(obj, k)) {
|
||||
obj[k] = v;
|
||||
} else if (Array.isArray(obj[k])) {
|
||||
obj[k].push(v);
|
||||
} else {
|
||||
obj[k] = [obj[k], v];
|
||||
}
|
||||
}
|
||||
|
||||
return obj;
|
||||
};
|
111
contrib/mORMot/SyNode/core_modules/node_modules/stream.js
generated
vendored
Normal file
111
contrib/mORMot/SyNode/core_modules/node_modules/stream.js
generated
vendored
Normal file
@@ -0,0 +1,111 @@
|
||||
'use strict';
|
||||
/**
|
||||
* See <a href="https://nodejs.org/api/stream.html">Node <strong>stream</strong> module documentation</a>
|
||||
* @module stream
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
module.exports = Stream;
|
||||
|
||||
const EE = require('events');
|
||||
const util = require('util');
|
||||
|
||||
util.inherits(Stream, EE);
|
||||
Stream.Readable = require('_stream_readable');
|
||||
Stream.Writable = require('_stream_writable');
|
||||
Stream.Duplex = require('_stream_duplex');
|
||||
Stream.Transform = require('_stream_transform');
|
||||
Stream.PassThrough = require('_stream_passthrough');
|
||||
|
||||
// Backwards-compat with node 0.4.x
|
||||
Stream.Stream = Stream;
|
||||
|
||||
|
||||
// old-style streams. Note that the pipe method (the only relevant
|
||||
// part of this class) is overridden in the Readable class.
|
||||
|
||||
function Stream() {
|
||||
EE.call(this);
|
||||
}
|
||||
|
||||
Stream.prototype.pipe = function(dest, options) {
|
||||
var source = this;
|
||||
|
||||
function ondata(chunk) {
|
||||
if (dest.writable) {
|
||||
if (false === dest.write(chunk) && source.pause) {
|
||||
source.pause();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
source.on('data', ondata);
|
||||
|
||||
function ondrain() {
|
||||
if (source.readable && source.resume) {
|
||||
source.resume();
|
||||
}
|
||||
}
|
||||
|
||||
dest.on('drain', ondrain);
|
||||
|
||||
// If the 'end' option is not supplied, dest.end() will be called when
|
||||
// source gets the 'end' or 'close' events. Only dest.end() once.
|
||||
if (!dest._isStdio && (!options || options.end !== false)) {
|
||||
source.on('end', onend);
|
||||
source.on('close', onclose);
|
||||
}
|
||||
|
||||
var didOnEnd = false;
|
||||
function onend() {
|
||||
if (didOnEnd) return;
|
||||
didOnEnd = true;
|
||||
|
||||
dest.end();
|
||||
}
|
||||
|
||||
|
||||
function onclose() {
|
||||
if (didOnEnd) return;
|
||||
didOnEnd = true;
|
||||
|
||||
if (typeof dest.destroy === 'function') dest.destroy();
|
||||
}
|
||||
|
||||
// don't leave dangling pipes when there are errors.
|
||||
function onerror(er) {
|
||||
cleanup();
|
||||
if (EE.listenerCount(this, 'error') === 0) {
|
||||
throw er; // Unhandled stream error in pipe.
|
||||
}
|
||||
}
|
||||
|
||||
source.on('error', onerror);
|
||||
dest.on('error', onerror);
|
||||
|
||||
// remove all the event listeners that were added.
|
||||
function cleanup() {
|
||||
source.removeListener('data', ondata);
|
||||
dest.removeListener('drain', ondrain);
|
||||
|
||||
source.removeListener('end', onend);
|
||||
source.removeListener('close', onclose);
|
||||
|
||||
source.removeListener('error', onerror);
|
||||
dest.removeListener('error', onerror);
|
||||
|
||||
source.removeListener('end', cleanup);
|
||||
source.removeListener('close', cleanup);
|
||||
|
||||
dest.removeListener('close', cleanup);
|
||||
}
|
||||
|
||||
source.on('end', cleanup);
|
||||
source.on('close', cleanup);
|
||||
|
||||
dest.on('close', cleanup);
|
||||
|
||||
dest.emit('pipe', source);
|
||||
|
||||
// Allow for unix-like usage: A.pipe(B).pipe(C)
|
||||
return dest;
|
||||
};
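// Illustrative usage (a sketch): pipe() returns the destination, so old-style
// streams can be chained, with back-pressure handled via pause()/resume() and 'drain':
//   source.pipe(transform).pipe(destination);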
|
266
contrib/mORMot/SyNode/core_modules/node_modules/string_decoder.js
generated
vendored
Normal file
266
contrib/mORMot/SyNode/core_modules/node_modules/string_decoder.js
generated
vendored
Normal file
@@ -0,0 +1,266 @@
|
||||
'use strict';
|
||||
/**
|
||||
* @module string_decoder
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
|
||||
const Buffer = require('buffer').Buffer;
|
||||
const internalUtil = require('internal/util');
|
||||
const isEncoding = Buffer[internalUtil.kIsEncodingSymbol];
|
||||
|
||||
// Do not cache `Buffer.isEncoding` when checking encoding names as some
|
||||
// modules monkey-patch it to support additional encodings
|
||||
function normalizeEncoding(enc) {
|
||||
const nenc = internalUtil.normalizeEncoding(enc);
|
||||
if (typeof nenc !== 'string' &&
|
||||
(Buffer.isEncoding === isEncoding || !Buffer.isEncoding(enc)))
|
||||
throw new Error(`Unknown encoding: ${enc}`);
|
||||
return nenc || enc;
|
||||
}
|
||||
|
||||
// StringDecoder provides an interface for efficiently splitting a series of
|
||||
// buffers into a series of JS strings without breaking apart multi-byte
|
||||
// characters.
|
||||
exports.StringDecoder = StringDecoder;
|
||||
function StringDecoder(encoding) {
|
||||
this.encoding = normalizeEncoding(encoding);
|
||||
var nb;
|
||||
switch (this.encoding) {
|
||||
case 'utf16le':
|
||||
this.text = utf16Text;
|
||||
this.end = utf16End;
|
||||
nb = 4;
|
||||
break;
|
||||
case 'utf8':
|
||||
this.fillLast = utf8FillLast;
|
||||
nb = 4;
|
||||
break;
|
||||
case 'base64':
|
||||
this.text = base64Text;
|
||||
this.end = base64End;
|
||||
nb = 3;
|
||||
break;
|
||||
default:
|
||||
this.write = simpleWrite;
|
||||
this.end = simpleEnd;
|
||||
return;
|
||||
}
|
||||
this.lastNeed = 0;
|
||||
this.lastTotal = 0;
|
||||
this.lastChar = Buffer.allocUnsafe(nb);
|
||||
}
|
||||
|
||||
StringDecoder.prototype.write = function(buf) {
|
||||
if (buf.length === 0)
|
||||
return '';
|
||||
var r;
|
||||
var i;
|
||||
if (this.lastNeed) {
|
||||
r = this.fillLast(buf);
|
||||
if (r === undefined)
|
||||
return '';
|
||||
i = this.lastNeed;
|
||||
this.lastNeed = 0;
|
||||
} else {
|
||||
i = 0;
|
||||
}
|
||||
if (i < buf.length)
|
||||
return (r ? r + this.text(buf, i) : this.text(buf, i));
|
||||
return r || '';
|
||||
};
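// Illustrative usage (a sketch, assuming Buffer.from is available): a multi-byte
// character split across two writes is buffered until its last byte arrives.
//   const d = new StringDecoder('utf8');
//   d.write(Buffer.from([0xE2, 0x82])); // ''  – first two bytes of '€' are buffered
//   d.write(Buffer.from([0xAC]));       // '€' – emitted once the sequence is complete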
|
||||
|
||||
StringDecoder.prototype.end = utf8End;
|
||||
|
||||
// Returns only complete characters in a Buffer
|
||||
StringDecoder.prototype.text = utf8Text;
|
||||
|
||||
// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
|
||||
StringDecoder.prototype.fillLast = function(buf) {
|
||||
if (this.lastNeed <= buf.length) {
|
||||
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
|
||||
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
|
||||
}
|
||||
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
|
||||
this.lastNeed -= buf.length;
|
||||
};
|
||||
|
||||
// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
|
||||
// continuation byte.
|
||||
function utf8CheckByte(byte) {
|
||||
if (byte <= 0x7F)
|
||||
return 0;
|
||||
else if (byte >> 5 === 0x06)
|
||||
return 2;
|
||||
else if (byte >> 4 === 0x0E)
|
||||
return 3;
|
||||
else if (byte >> 3 === 0x1E)
|
||||
return 4;
|
||||
return -1;
|
||||
}
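// Illustrative classification (a sketch):
//   utf8CheckByte(0x41); //  0 – ASCII byte
//   utf8CheckByte(0xC3); //  2 – leading byte of a 2-byte sequence
//   utf8CheckByte(0xE2); //  3 – leading byte of a 3-byte sequence
//   utf8CheckByte(0xF0); //  4 – leading byte of a 4-byte sequence
//   utf8CheckByte(0x80); // -1 – continuation byte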
|
||||
|
||||
// Checks at most 3 bytes at the end of a Buffer in order to detect an
|
||||
// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
|
||||
// needed to complete the UTF-8 character (if applicable) are returned.
|
||||
function utf8CheckIncomplete(self, buf, i) {
|
||||
var j = buf.length - 1;
|
||||
if (j < i)
|
||||
return 0;
|
||||
var nb = utf8CheckByte(buf[j]);
|
||||
if (nb >= 0) {
|
||||
if (nb > 0)
|
||||
self.lastNeed = nb - 1;
|
||||
return nb;
|
||||
}
|
||||
if (--j < i)
|
||||
return 0;
|
||||
nb = utf8CheckByte(buf[j]);
|
||||
if (nb >= 0) {
|
||||
if (nb > 0)
|
||||
self.lastNeed = nb - 2;
|
||||
return nb;
|
||||
}
|
||||
if (--j < i)
|
||||
return 0;
|
||||
nb = utf8CheckByte(buf[j]);
|
||||
if (nb >= 0) {
|
||||
if (nb > 0) {
|
||||
if (nb === 2)
|
||||
nb = 0;
|
||||
else
|
||||
self.lastNeed = nb - 3;
|
||||
}
|
||||
return nb;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Validates as many continuation bytes for a multi-byte UTF-8 character as
|
||||
// needed or are available. If we see a non-continuation byte where we expect
|
||||
// one, we "replace" the validated continuation bytes we've seen so far with
|
||||
// UTF-8 replacement characters ('\ufffd'), to match v8's UTF-8 decoding
|
||||
// behavior. The continuation byte check is included three times in the case
|
||||
// where all of the continuation bytes for a character exist in the same buffer.
|
||||
// It is also done this way as a slight performance increase instead of using a
|
||||
// loop.
|
||||
function utf8CheckExtraBytes(self, buf, p) {
|
||||
if ((buf[0] & 0xC0) !== 0x80) {
|
||||
self.lastNeed = 0;
|
||||
return '\ufffd'.repeat(p);
|
||||
}
|
||||
if (self.lastNeed > 1 && buf.length > 1) {
|
||||
if ((buf[1] & 0xC0) !== 0x80) {
|
||||
self.lastNeed = 1;
|
||||
return '\ufffd'.repeat(p + 1);
|
||||
}
|
||||
if (self.lastNeed > 2 && buf.length > 2) {
|
||||
if ((buf[2] & 0xC0) !== 0x80) {
|
||||
self.lastNeed = 2;
|
||||
return '\ufffd'.repeat(p + 2);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
|
||||
function utf8FillLast(buf) {
|
||||
const p = this.lastTotal - this.lastNeed;
|
||||
var r = utf8CheckExtraBytes(this, buf, p);
|
||||
if (r !== undefined)
|
||||
return r;
|
||||
if (this.lastNeed <= buf.length) {
|
||||
buf.copy(this.lastChar, p, 0, this.lastNeed);
|
||||
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
|
||||
}
|
||||
buf.copy(this.lastChar, p, 0, buf.length);
|
||||
this.lastNeed -= buf.length;
|
||||
}
|
||||
|
||||
// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
|
||||
// partial character, the character's bytes are buffered until the required
|
||||
// number of bytes are available.
|
||||
function utf8Text(buf, i) {
|
||||
const total = utf8CheckIncomplete(this, buf, i);
|
||||
if (!this.lastNeed)
|
||||
return buf.toString('utf8', i);
|
||||
this.lastTotal = total;
|
||||
const end = buf.length - (total - this.lastNeed);
|
||||
buf.copy(this.lastChar, 0, end);
|
||||
return buf.toString('utf8', i, end);
|
||||
}
|
||||
|
||||
// For UTF-8, a replacement character for each buffered byte of a (partial)
|
||||
// character needs to be added to the output.
|
||||
function utf8End(buf) {
|
||||
const r = (buf && buf.length ? this.write(buf) : '');
|
||||
if (this.lastNeed)
|
||||
return r + '\ufffd'.repeat(this.lastTotal - this.lastNeed);
|
||||
return r;
|
||||
}
|
||||
|
||||
// UTF-16LE typically needs two bytes per character, but even if we have an even
|
||||
// number of bytes available, we need to check if we end on a leading/high
|
||||
// surrogate. In that case, we need to wait for the next two bytes in order to
|
||||
// decode the last character properly.
|
||||
function utf16Text(buf, i) {
|
||||
if ((buf.length - i) % 2 === 0) {
|
||||
const r = buf.toString('utf16le', i);
|
||||
if (r) {
|
||||
const c = r.charCodeAt(r.length - 1);
|
||||
if (c >= 0xD800 && c <= 0xDBFF) {
|
||||
this.lastNeed = 2;
|
||||
this.lastTotal = 4;
|
||||
this.lastChar[0] = buf[buf.length - 2];
|
||||
this.lastChar[1] = buf[buf.length - 1];
|
||||
return r.slice(0, -1);
|
||||
}
|
||||
}
|
||||
return r;
|
||||
}
|
||||
this.lastNeed = 1;
|
||||
this.lastTotal = 2;
|
||||
this.lastChar[0] = buf[buf.length - 1];
|
||||
return buf.toString('utf16le', i, buf.length - 1);
|
||||
}
|
||||
|
||||
// For UTF-16LE we do not explicitly append special replacement characters if we
|
||||
// end on a partial character, we simply let v8 handle that.
|
||||
function utf16End(buf) {
|
||||
const r = (buf && buf.length ? this.write(buf) : '');
|
||||
if (this.lastNeed) {
|
||||
const end = this.lastTotal - this.lastNeed;
|
||||
return r + this.lastChar.toString('utf16le', 0, end);
|
||||
}
|
||||
return r;
|
||||
}
|
||||
|
||||
function base64Text(buf, i) {
|
||||
const n = (buf.length - i) % 3;
|
||||
if (n === 0)
|
||||
return buf.toString('base64', i);
|
||||
this.lastNeed = 3 - n;
|
||||
this.lastTotal = 3;
|
||||
if (n === 1) {
|
||||
this.lastChar[0] = buf[buf.length - 1];
|
||||
} else {
|
||||
this.lastChar[0] = buf[buf.length - 2];
|
||||
this.lastChar[1] = buf[buf.length - 1];
|
||||
}
|
||||
return buf.toString('base64', i, buf.length - n);
|
||||
}
|
||||
|
||||
|
||||
function base64End(buf) {
|
||||
const r = (buf && buf.length ? this.write(buf) : '');
|
||||
if (this.lastNeed)
|
||||
return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
|
||||
return r;
|
||||
}
|
||||
|
||||
// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
|
||||
function simpleWrite(buf) {
|
||||
return buf.toString(this.encoding);
|
||||
}
|
||||
|
||||
function simpleEnd(buf) {
|
||||
return (buf && buf.length ? this.write(buf) : '');
|
||||
}
|
7
contrib/mORMot/SyNode/core_modules/node_modules/timers.js
generated
vendored
Normal file
7
contrib/mORMot/SyNode/core_modules/node_modules/timers.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
/**
|
||||
* MPV - fake implementation of the node.js timers module
|
||||
* Provided just so xml2js can work
|
||||
*/
|
||||
module.exports = {
|
||||
setImmediate: global.setImmediate
|
||||
}
|
9
contrib/mORMot/SyNode/core_modules/node_modules/tty.js
generated
vendored
Normal file
9
contrib/mORMot/SyNode/core_modules/node_modules/tty.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
/**
|
||||
* MPV - Fake implementation of the node.js tty module
|
||||
* isatty() always returns `false`
|
||||
* @module tty
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
exports.isatty = function(fd) {
|
||||
return false;
|
||||
};
|
730
contrib/mORMot/SyNode/core_modules/node_modules/url.js
generated
vendored
Normal file
730
contrib/mORMot/SyNode/core_modules/node_modules/url.js
generated
vendored
Normal file
@@ -0,0 +1,730 @@
|
||||
/**
|
||||
* See <a href="https://nodejs.org/api/url.html">Node <strong>url</strong> module documentation</a>
|
||||
* @module url
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const punycode = require('punycode');
|
||||
|
||||
exports.parse = urlParse;
|
||||
exports.resolve = urlResolve;
|
||||
exports.resolveObject = urlResolveObject;
|
||||
exports.format = urlFormat;
|
||||
|
||||
exports.Url = Url;
|
||||
|
||||
function Url() {
|
||||
this.protocol = null;
|
||||
this.slashes = null;
|
||||
this.auth = null;
|
||||
this.host = null;
|
||||
this.port = null;
|
||||
this.hostname = null;
|
||||
this.hash = null;
|
||||
this.search = null;
|
||||
this.query = null;
|
||||
this.pathname = null;
|
||||
this.path = null;
|
||||
this.href = null;
|
||||
}
|
||||
|
||||
// Reference: RFC 3986, RFC 1808, RFC 2396
|
||||
|
||||
// define these here so at least they only have to be
|
||||
// compiled once on the first module load.
|
||||
const protocolPattern = /^([a-z0-9.+-]+:)/i;
|
||||
const portPattern = /:[0-9]*$/;
|
||||
|
||||
// Special case for a simple path URL
|
||||
const simplePathPattern = /^(\/\/?(?!\/)[^\?\s]*)(\?[^\s]*)?$/;
|
||||
|
||||
// RFC 2396: characters reserved for delimiting URLs.
|
||||
// We actually just auto-escape these.
|
||||
const delims = ['<', '>', '"', '`', ' ', '\r', '\n', '\t'];
|
||||
|
||||
// RFC 2396: characters not allowed for various reasons.
|
||||
const unwise = ['{', '}', '|', '\\', '^', '`'].concat(delims);
|
||||
|
||||
// Allowed by RFCs, but cause of XSS attacks. Always escape these.
|
||||
const autoEscape = ['\''].concat(unwise);
|
||||
|
||||
// Characters that are never ever allowed in a hostname.
|
||||
// Note that any invalid chars are also handled, but these
|
||||
// are the ones that are *expected* to be seen, so we fast-path them.
|
||||
const nonHostChars = ['%', '/', '?', ';', '#'].concat(autoEscape);
|
||||
const hostEndingChars = ['/', '?', '#'];
|
||||
const hostnameMaxLen = 255;
|
||||
const hostnamePartPattern = /^[+a-z0-9A-Z_-]{0,63}$/;
|
||||
const hostnamePartStart = /^([+a-z0-9A-Z_-]{0,63})(.*)$/;
|
||||
// protocols that can allow "unsafe" and "unwise" chars.
|
||||
const unsafeProtocol = {
|
||||
'javascript': true,
|
||||
'javascript:': true
|
||||
};
|
||||
// protocols that never have a hostname.
|
||||
const hostlessProtocol = {
|
||||
'javascript': true,
|
||||
'javascript:': true
|
||||
};
|
||||
// protocols that always contain a // bit.
|
||||
const slashedProtocol = {
|
||||
'http': true,
|
||||
'https': true,
|
||||
'ftp': true,
|
||||
'gopher': true,
|
||||
'file': true,
|
||||
'http:': true,
|
||||
'https:': true,
|
||||
'ftp:': true,
|
||||
'gopher:': true,
|
||||
'file:': true
|
||||
};
|
||||
const querystring = require('querystring');
|
||||
|
||||
function urlParse(url, parseQueryString, slashesDenoteHost) {
|
||||
if (url instanceof Url) return url;
|
||||
|
||||
var u = new Url();
|
||||
u.parse(url, parseQueryString, slashesDenoteHost);
|
||||
return u;
|
||||
}
|
||||
|
||||
Url.prototype.parse = function(url, parseQueryString, slashesDenoteHost) {
|
||||
if (typeof url !== 'string') {
|
||||
throw new TypeError('Parameter "url" must be a string, not ' + typeof url);
|
||||
}
|
||||
|
||||
// Copy chrome, IE, opera backslash-handling behavior.
|
||||
// Back slashes before the query string get converted to forward slashes
|
||||
// See: https://code.google.com/p/chromium/issues/detail?id=25916
|
||||
var queryIndex = url.indexOf('?'),
|
||||
splitter =
|
||||
(queryIndex !== -1 && queryIndex < url.indexOf('#')) ? '?' : '#',
|
||||
uSplit = url.split(splitter),
|
||||
slashRegex = /\\/g;
|
||||
uSplit[0] = uSplit[0].replace(slashRegex, '/');
|
||||
url = uSplit.join(splitter);
|
||||
|
||||
var rest = url;
|
||||
|
||||
// trim before proceeding.
|
||||
// This is to support parse stuff like " http://foo.com \n"
|
||||
rest = rest.trim();
|
||||
|
||||
if (!slashesDenoteHost && url.split('#').length === 1) {
|
||||
// Try fast path regexp
|
||||
var simplePath = simplePathPattern.exec(rest);
|
||||
if (simplePath) {
|
||||
this.path = rest;
|
||||
this.href = rest;
|
||||
this.pathname = simplePath[1];
|
||||
if (simplePath[2]) {
|
||||
this.search = simplePath[2];
|
||||
if (parseQueryString) {
|
||||
this.query = querystring.parse(this.search.substr(1));
|
||||
} else {
|
||||
this.query = this.search.substr(1);
|
||||
}
|
||||
} else if (parseQueryString) {
|
||||
this.search = '';
|
||||
this.query = {};
|
||||
}
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
var proto = protocolPattern.exec(rest);
|
||||
if (proto) {
|
||||
proto = proto[0];
|
||||
var lowerProto = proto.toLowerCase();
|
||||
this.protocol = lowerProto;
|
||||
rest = rest.substr(proto.length);
|
||||
}
|
||||
|
||||
// figure out if it's got a host
|
||||
// user@server is *always* interpreted as a hostname, and url
|
||||
// resolution will treat //foo/bar as host=foo,path=bar because that's
|
||||
// how the browser resolves relative URLs.
|
||||
if (slashesDenoteHost || proto || rest.match(/^\/\/[^@\/]+@[^@\/]+/)) {
|
||||
var slashes = rest.substr(0, 2) === '//';
|
||||
if (slashes && !(proto && hostlessProtocol[proto])) {
|
||||
rest = rest.substr(2);
|
||||
this.slashes = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!hostlessProtocol[proto] &&
|
||||
(slashes || (proto && !slashedProtocol[proto]))) {
|
||||
|
||||
// there's a hostname.
|
||||
// the first instance of /, ?, ;, or # ends the host.
|
||||
//
|
||||
// If there is an @ in the hostname, then non-host chars *are* allowed
|
||||
// to the left of the last @ sign, unless some host-ending character
|
||||
// comes *before* the @-sign.
|
||||
// URLs are obnoxious.
|
||||
//
|
||||
// ex:
|
||||
// http://a@b@c/ => user:a@b host:c
|
||||
// http://a@b?@c => user:a host:b path:/?@c
|
||||
|
||||
// v0.12 TODO(isaacs): This is not quite how Chrome does things.
|
||||
// Review our test case against browsers more comprehensively.
|
||||
|
||||
// find the first instance of any hostEndingChars
|
||||
var hostEnd = -1;
|
||||
for (var i = 0; i < hostEndingChars.length; i++) {
|
||||
var hec = rest.indexOf(hostEndingChars[i]);
|
||||
if (hec !== -1 && (hostEnd === -1 || hec < hostEnd))
|
||||
hostEnd = hec;
|
||||
}
|
||||
|
||||
// at this point, either we have an explicit point where the
|
||||
// auth portion cannot go past, or the last @ char is the decider.
|
||||
var auth, atSign;
|
||||
if (hostEnd === -1) {
|
||||
// atSign can be anywhere.
|
||||
atSign = rest.lastIndexOf('@');
|
||||
} else {
|
||||
// atSign must be in auth portion.
|
||||
// http://a@b/c@d => host:b auth:a path:/c@d
|
||||
atSign = rest.lastIndexOf('@', hostEnd);
|
||||
}
|
||||
|
||||
// Now we have a portion which is definitely the auth.
|
||||
// Pull that off.
|
||||
if (atSign !== -1) {
|
||||
auth = rest.slice(0, atSign);
|
||||
rest = rest.slice(atSign + 1);
|
||||
this.auth = decodeURIComponent(auth);
|
||||
}
|
||||
|
||||
// the host is the remaining to the left of the first non-host char
|
||||
hostEnd = -1;
|
||||
for (var i = 0; i < nonHostChars.length; i++) {
|
||||
var hec = rest.indexOf(nonHostChars[i]);
|
||||
if (hec !== -1 && (hostEnd === -1 || hec < hostEnd))
|
||||
hostEnd = hec;
|
||||
}
|
||||
// if we still have not hit it, then the entire thing is a host.
|
||||
if (hostEnd === -1)
|
||||
hostEnd = rest.length;
|
||||
|
||||
this.host = rest.slice(0, hostEnd);
|
||||
rest = rest.slice(hostEnd);
|
||||
|
||||
// pull out port.
|
||||
this.parseHost();
|
||||
|
||||
// we've indicated that there is a hostname,
|
||||
// so even if it's empty, it has to be present.
|
||||
this.hostname = this.hostname || '';
|
||||
|
||||
// if hostname begins with [ and ends with ]
|
||||
// assume that it's an IPv6 address.
|
||||
var ipv6Hostname = this.hostname[0] === '[' &&
|
||||
this.hostname[this.hostname.length - 1] === ']';
|
||||
|
||||
// validate a little.
|
||||
if (!ipv6Hostname) {
|
||||
var hostparts = this.hostname.split(/\./);
|
||||
for (var i = 0, l = hostparts.length; i < l; i++) {
|
||||
var part = hostparts[i];
|
||||
if (!part) continue;
|
||||
if (!part.match(hostnamePartPattern)) {
|
||||
var newpart = '';
|
||||
for (var j = 0, k = part.length; j < k; j++) {
|
||||
if (part.charCodeAt(j) > 127) {
|
||||
// we replace non-ASCII char with a temporary placeholder
|
||||
// we need this to make sure size of hostname is not
|
||||
// broken by replacing non-ASCII by nothing
|
||||
newpart += 'x';
|
||||
} else {
|
||||
newpart += part[j];
|
||||
}
|
||||
}
|
||||
// we test again with ASCII char only
|
||||
if (!newpart.match(hostnamePartPattern)) {
|
||||
var validParts = hostparts.slice(0, i);
|
||||
var notHost = hostparts.slice(i + 1);
|
||||
var bit = part.match(hostnamePartStart);
|
||||
if (bit) {
|
||||
validParts.push(bit[1]);
|
||||
notHost.unshift(bit[2]);
|
||||
}
|
||||
if (notHost.length) {
|
||||
rest = '/' + notHost.join('.') + rest;
|
||||
}
|
||||
this.hostname = validParts.join('.');
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.hostname.length > hostnameMaxLen) {
|
||||
this.hostname = '';
|
||||
} else {
|
||||
// hostnames are always lower case.
|
||||
this.hostname = this.hostname.toLowerCase();
|
||||
}
|
||||
|
||||
if (!ipv6Hostname) {
|
||||
// IDNA Support: Returns a punycoded representation of "domain".
|
||||
// It only converts parts of the domain name that
|
||||
// have non-ASCII characters, i.e. it doesn't matter if
|
||||
// you call it with a domain that already is ASCII-only.
|
||||
this.hostname = punycode.toASCII(this.hostname);
|
||||
}
|
||||
|
||||
var p = this.port ? ':' + this.port : '';
|
||||
var h = this.hostname || '';
|
||||
this.host = h + p;
|
||||
|
||||
// strip [ and ] from the hostname
|
||||
// the host field still retains them, though
|
||||
if (ipv6Hostname) {
|
||||
this.hostname = this.hostname.substr(1, this.hostname.length - 2);
|
||||
if (rest[0] !== '/') {
|
||||
rest = '/' + rest;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// now rest is set to the post-host stuff.
|
||||
// chop off any delim chars.
|
||||
if (!unsafeProtocol[lowerProto]) {
|
||||
|
||||
// First, make 100% sure that any "autoEscape" chars get
|
||||
// escaped, even if encodeURIComponent doesn't think they
|
||||
// need to be.
|
||||
for (var i = 0, l = autoEscape.length; i < l; i++) {
|
||||
var ae = autoEscape[i];
|
||||
if (rest.indexOf(ae) === -1)
|
||||
continue;
|
||||
var esc = encodeURIComponent(ae);
|
||||
if (esc === ae) {
|
||||
esc = escape(ae);
|
||||
}
|
||||
rest = rest.split(ae).join(esc);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// chop off from the tail first.
|
||||
var hash = rest.indexOf('#');
|
||||
if (hash !== -1) {
|
||||
// got a fragment string.
|
||||
this.hash = rest.substr(hash);
|
||||
rest = rest.slice(0, hash);
|
||||
}
|
||||
var qm = rest.indexOf('?');
|
||||
if (qm !== -1) {
|
||||
this.search = rest.substr(qm);
|
||||
this.query = rest.substr(qm + 1);
|
||||
if (parseQueryString) {
|
||||
this.query = querystring.parse(this.query);
|
||||
}
|
||||
rest = rest.slice(0, qm);
|
||||
} else if (parseQueryString) {
|
||||
// no query string, but parseQueryString still requested
|
||||
this.search = '';
|
||||
this.query = {};
|
||||
}
|
||||
if (rest) this.pathname = rest;
|
||||
if (slashedProtocol[lowerProto] &&
|
||||
this.hostname && !this.pathname) {
|
||||
this.pathname = '/';
|
||||
}
|
||||
|
||||
//to support http.request
|
||||
if (this.pathname || this.search) {
|
||||
var p = this.pathname || '';
|
||||
var s = this.search || '';
|
||||
this.path = p + s;
|
||||
}
|
||||
|
||||
// finally, reconstruct the href based on what has been validated.
|
||||
this.href = this.format();
|
||||
return this;
|
||||
};
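// Illustrative usage (a sketch, matching the Node url documentation):
//   urlParse('http://user:pass@host.com:8080/p/a/t/h?query=string#hash');
//   // -> { protocol: 'http:', auth: 'user:pass', host: 'host.com:8080',
//   //      port: '8080', hostname: 'host.com', hash: '#hash',
//   //      search: '?query=string', query: 'query=string',
//   //      pathname: '/p/a/t/h', path: '/p/a/t/h?query=string',
//   //      href: 'http://user:pass@host.com:8080/p/a/t/h?query=string#hash' }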
|
||||
|
||||
// format a parsed object into a url string
|
||||
function urlFormat(obj) {
|
||||
// ensure it's an object, and not a string url.
|
||||
// If it's an obj, this is a no-op.
|
||||
// this way, you can call url_format() on strings
|
||||
// to clean up potentially wonky urls.
|
||||
if (typeof obj === 'string') obj = urlParse(obj);
|
||||
|
||||
else if (typeof obj !== 'object' || obj === null)
|
||||
throw new TypeError('Parameter "urlObj" must be an object, not ' +
|
||||
(obj === null ? 'null' : typeof obj));
|
||||
|
||||
else if (!(obj instanceof Url)) return Url.prototype.format.call(obj);
|
||||
|
||||
return obj.format();
|
||||
}
|
||||
|
||||
Url.prototype.format = function() {
|
||||
var auth = this.auth || '';
|
||||
if (auth) {
|
||||
auth = encodeURIComponent(auth);
|
||||
auth = auth.replace(/%3A/i, ':');
|
||||
auth += '@';
|
||||
}
|
||||
|
||||
var protocol = this.protocol || '',
|
||||
pathname = this.pathname || '',
|
||||
hash = this.hash || '',
|
||||
host = false,
|
||||
query = '';
|
||||
|
||||
if (this.host) {
|
||||
host = auth + this.host;
|
||||
} else if (this.hostname) {
|
||||
host = auth + (this.hostname.indexOf(':') === -1 ?
|
||||
this.hostname :
|
||||
'[' + this.hostname + ']');
|
||||
if (this.port) {
|
||||
host += ':' + this.port;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.query !== null &&
|
||||
typeof this.query === 'object' &&
|
||||
Object.keys(this.query).length) {
|
||||
query = querystring.stringify(this.query);
|
||||
}
|
||||
|
||||
var search = this.search || (query && ('?' + query)) || '';
|
||||
|
||||
if (protocol && protocol.substr(-1) !== ':') protocol += ':';
|
||||
|
||||
// only the slashedProtocols get the //. Not mailto:, xmpp:, etc.
|
||||
// unless they had them to begin with.
|
||||
if (this.slashes ||
|
||||
(!protocol || slashedProtocol[protocol]) && host !== false) {
|
||||
host = '//' + (host || '');
|
||||
if (pathname && pathname.charAt(0) !== '/') pathname = '/' + pathname;
|
||||
} else if (!host) {
|
||||
host = '';
|
||||
}
|
||||
|
||||
if (hash && hash.charAt(0) !== '#') hash = '#' + hash;
|
||||
if (search && search.charAt(0) !== '?') search = '?' + search;
|
||||
|
||||
pathname = pathname.replace(/[?#]/g, function(match) {
|
||||
return encodeURIComponent(match);
|
||||
});
|
||||
search = search.replace('#', '%23');
|
||||
|
||||
return protocol + host + pathname + search + hash;
|
||||
};
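// Illustrative usage (a sketch): a plain object is formatted via
// Url.prototype.format.call(), and a query object is stringified with querystring.
//   urlFormat({protocol: 'http:', hostname: 'example.com', pathname: '/path', query: {a: '1'}});
//   // -> 'http://example.com/path?a=1'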
|
||||
|
||||
function urlResolve(source, relative) {
|
||||
return urlParse(source, false, true).resolve(relative);
|
||||
}
|
||||
|
||||
Url.prototype.resolve = function(relative) {
|
||||
return this.resolveObject(urlParse(relative, false, true)).format();
|
||||
};
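// Illustrative usage (a sketch, matching the Node url documentation):
//   urlResolve('/one/two/three', 'four');      // '/one/two/four'
//   urlResolve('http://example.com/', '/one'); // 'http://example.com/one'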
|
||||
|
||||
function urlResolveObject(source, relative) {
|
||||
if (!source) return relative;
|
||||
return urlParse(source, false, true).resolveObject(relative);
|
||||
}
|
||||
|
||||
Url.prototype.resolveObject = function(relative) {
|
||||
if (typeof relative === 'string') {
|
||||
var rel = new Url();
|
||||
rel.parse(relative, false, true);
|
||||
relative = rel;
|
||||
}
|
||||
|
||||
var result = new Url();
|
||||
var tkeys = Object.keys(this);
|
||||
for (var tk = 0; tk < tkeys.length; tk++) {
|
||||
var tkey = tkeys[tk];
|
||||
result[tkey] = this[tkey];
|
||||
}
|
||||
|
||||
// hash is always overridden, no matter what.
|
||||
// even href="" will remove it.
|
||||
result.hash = relative.hash;
|
||||
|
||||
// if the relative url is empty, then there's nothing left to do here.
|
||||
if (relative.href === '') {
|
||||
result.href = result.format();
|
||||
return result;
|
||||
}
|
||||
|
||||
// hrefs like //foo/bar always cut to the protocol.
|
||||
if (relative.slashes && !relative.protocol) {
|
||||
// take everything except the protocol from relative
|
||||
var rkeys = Object.keys(relative);
|
||||
for (var rk = 0; rk < rkeys.length; rk++) {
|
||||
var rkey = rkeys[rk];
|
||||
if (rkey !== 'protocol')
|
||||
result[rkey] = relative[rkey];
|
||||
}
|
||||
|
||||
//urlParse appends trailing / to urls like http://www.example.com
|
||||
if (slashedProtocol[result.protocol] &&
|
||||
result.hostname && !result.pathname) {
|
||||
result.path = result.pathname = '/';
|
||||
}
|
||||
|
||||
result.href = result.format();
|
||||
return result;
|
||||
}
|
||||
|
||||
if (relative.protocol && relative.protocol !== result.protocol) {
|
||||
// if it's a known url protocol, then changing
|
||||
// the protocol does weird things
|
||||
// first, if it's not file:, then we MUST have a host,
|
||||
// and if there was a path
|
||||
// to begin with, then we MUST have a path.
|
||||
// if it is file:, then the host is dropped,
|
||||
// because that's known to be hostless.
|
||||
// anything else is assumed to be absolute.
|
||||
if (!slashedProtocol[relative.protocol]) {
|
||||
var keys = Object.keys(relative);
|
||||
for (var v = 0; v < keys.length; v++) {
|
||||
var k = keys[v];
|
||||
result[k] = relative[k];
|
||||
}
|
||||
result.href = result.format();
|
||||
return result;
|
||||
}
|
||||
|
||||
result.protocol = relative.protocol;
|
||||
if (!relative.host &&
|
||||
!/^file:?$/.test(relative.protocol) &&
|
||||
!hostlessProtocol[relative.protocol]) {
|
||||
var relPath = (relative.pathname || '').split('/');
|
||||
while (relPath.length && !(relative.host = relPath.shift()));
|
||||
if (!relative.host) relative.host = '';
|
||||
if (!relative.hostname) relative.hostname = '';
|
||||
if (relPath[0] !== '') relPath.unshift('');
|
||||
if (relPath.length < 2) relPath.unshift('');
|
||||
result.pathname = relPath.join('/');
|
||||
} else {
|
||||
result.pathname = relative.pathname;
|
||||
}
|
||||
result.search = relative.search;
|
||||
result.query = relative.query;
|
||||
result.host = relative.host || '';
|
||||
result.auth = relative.auth;
|
||||
result.hostname = relative.hostname || relative.host;
|
||||
result.port = relative.port;
|
||||
// to support http.request
|
||||
if (result.pathname || result.search) {
|
||||
var p = result.pathname || '';
|
||||
var s = result.search || '';
|
||||
result.path = p + s;
|
||||
}
|
||||
result.slashes = result.slashes || relative.slashes;
|
||||
result.href = result.format();
|
||||
return result;
|
||||
}
|
||||
|
||||
var isSourceAbs = (result.pathname && result.pathname.charAt(0) === '/'),
|
||||
isRelAbs = (
|
||||
relative.host ||
|
||||
relative.pathname && relative.pathname.charAt(0) === '/'
|
||||
),
|
||||
mustEndAbs = (isRelAbs || isSourceAbs ||
|
||||
(result.host && relative.pathname)),
|
||||
removeAllDots = mustEndAbs,
|
||||
srcPath = result.pathname && result.pathname.split('/') || [],
|
||||
relPath = relative.pathname && relative.pathname.split('/') || [],
|
||||
psychotic = result.protocol && !slashedProtocol[result.protocol];
|
||||
|
||||
// if the url is a non-slashed url, then relative
|
||||
// links like ../.. should be able
|
||||
// to crawl up to the hostname, as well. This is strange.
|
||||
// result.protocol has already been set by now.
|
||||
// Later on, put the first path part into the host field.
|
||||
if (psychotic) {
|
||||
result.hostname = '';
|
||||
result.port = null;
|
||||
if (result.host) {
|
||||
if (srcPath[0] === '') srcPath[0] = result.host;
|
||||
else srcPath.unshift(result.host);
|
||||
}
|
||||
result.host = '';
|
||||
if (relative.protocol) {
|
||||
relative.hostname = null;
|
||||
relative.port = null;
|
||||
if (relative.host) {
|
||||
if (relPath[0] === '') relPath[0] = relative.host;
|
||||
else relPath.unshift(relative.host);
|
||||
}
|
||||
relative.host = null;
|
||||
}
|
||||
mustEndAbs = mustEndAbs && (relPath[0] === '' || srcPath[0] === '');
|
||||
}
|
||||
|
||||
if (isRelAbs) {
|
||||
// it's absolute.
|
||||
result.host = (relative.host || relative.host === '') ?
|
||||
relative.host : result.host;
|
||||
result.hostname = (relative.hostname || relative.hostname === '') ?
|
||||
relative.hostname : result.hostname;
|
||||
result.search = relative.search;
|
||||
result.query = relative.query;
|
||||
srcPath = relPath;
|
||||
// fall through to the dot-handling below.
|
||||
} else if (relPath.length) {
|
||||
// it's relative
|
||||
// throw away the existing file, and take the new path instead.
|
||||
if (!srcPath) srcPath = [];
|
||||
srcPath.pop();
|
||||
srcPath = srcPath.concat(relPath);
|
||||
result.search = relative.search;
|
||||
result.query = relative.query;
|
||||
} else if (relative.search !== null && relative.search !== undefined) {
|
||||
// just pull out the search.
|
||||
// like href='?foo'.
|
||||
// Put this after the other two cases because it simplifies the booleans
|
||||
if (psychotic) {
|
||||
result.hostname = result.host = srcPath.shift();
|
||||
// occasionally the auth can get stuck only in host
|
||||
//this especially happens in cases like
|
||||
//url.resolveObject('mailto:local1@domain1', 'local2@domain2')
|
||||
var authInHost = result.host && result.host.indexOf('@') > 0 ?
|
||||
result.host.split('@') : false;
|
||||
if (authInHost) {
|
||||
result.auth = authInHost.shift();
|
||||
result.host = result.hostname = authInHost.shift();
|
||||
}
|
||||
}
|
||||
result.search = relative.search;
|
||||
result.query = relative.query;
|
||||
//to support http.request
|
||||
if (result.pathname !== null || result.search !== null) {
|
||||
result.path = (result.pathname ? result.pathname : '') +
|
||||
(result.search ? result.search : '');
|
||||
}
|
||||
result.href = result.format();
|
||||
return result;
|
||||
}
|
||||
|
||||
if (!srcPath.length) {
|
||||
// no path at all. easy.
|
||||
// we've already handled the other stuff above.
|
||||
result.pathname = null;
|
||||
//to support http.request
|
||||
if (result.search) {
|
||||
result.path = '/' + result.search;
|
||||
} else {
|
||||
result.path = null;
|
||||
}
|
||||
result.href = result.format();
|
||||
return result;
|
||||
}
|
||||
|
||||
// if a url ENDs in . or .., then it must get a trailing slash.
|
||||
// however, if it ends in anything else non-slashy,
|
||||
// then it must NOT get a trailing slash.
|
||||
var last = srcPath.slice(-1)[0];
|
||||
var hasTrailingSlash = (
|
||||
(result.host || relative.host || srcPath.length > 1) &&
|
||||
(last === '.' || last === '..') || last === '');
|
||||
|
||||
// strip single dots, resolve double dots to parent dir
|
||||
// if the path tries to go above the root, `up` ends up > 0
|
||||
var up = 0;
|
||||
for (var i = srcPath.length; i >= 0; i--) {
|
||||
last = srcPath[i];
|
||||
if (last === '.') {
|
||||
spliceOne(srcPath, i);
|
||||
} else if (last === '..') {
|
||||
spliceOne(srcPath, i);
|
||||
up++;
|
||||
} else if (up) {
|
||||
spliceOne(srcPath, i);
|
||||
up--;
|
||||
}
|
||||
}
|
||||
|
||||
// if the path is allowed to go above the root, restore leading ..s
|
||||
if (!mustEndAbs && !removeAllDots) {
|
||||
for (; up--; up) {
|
||||
srcPath.unshift('..');
|
||||
}
|
||||
}
|
||||
|
||||
if (mustEndAbs && srcPath[0] !== '' &&
|
||||
(!srcPath[0] || srcPath[0].charAt(0) !== '/')) {
|
||||
srcPath.unshift('');
|
||||
}
|
||||
|
||||
if (hasTrailingSlash && (srcPath.join('/').substr(-1) !== '/')) {
|
||||
srcPath.push('');
|
||||
}
|
||||
|
||||
var isAbsolute = srcPath[0] === '' ||
|
||||
(srcPath[0] && srcPath[0].charAt(0) === '/');
|
||||
|
||||
// put the host back
|
||||
if (psychotic) {
|
||||
result.hostname = result.host = isAbsolute ? '' :
|
||||
srcPath.length ? srcPath.shift() : '';
|
||||
// occasionally the auth can get stuck only in host
|
||||
//this especially happens in cases like
|
||||
//url.resolveObject('mailto:local1@domain1', 'local2@domain2')
|
||||
var authInHost = result.host && result.host.indexOf('@') > 0 ?
|
||||
result.host.split('@') : false;
|
||||
if (authInHost) {
|
||||
result.auth = authInHost.shift();
|
||||
result.host = result.hostname = authInHost.shift();
|
||||
}
|
||||
}
|
||||
|
||||
mustEndAbs = mustEndAbs || (result.host && srcPath.length);
|
||||
|
||||
if (mustEndAbs && !isAbsolute) {
|
||||
srcPath.unshift('');
|
||||
}
|
||||
|
||||
if (!srcPath.length) {
|
||||
result.pathname = null;
|
||||
result.path = null;
|
||||
} else {
|
||||
result.pathname = srcPath.join('/');
|
||||
}
|
||||
|
||||
// to support http.request
|
||||
if (result.pathname !== null || result.search !== null) {
|
||||
result.path = (result.pathname ? result.pathname : '') +
|
||||
(result.search ? result.search : '');
|
||||
}
|
||||
result.auth = relative.auth || result.auth;
|
||||
result.slashes = result.slashes || relative.slashes;
|
||||
result.href = result.format();
|
||||
return result;
|
||||
};
|
||||
|
||||
Url.prototype.parseHost = function() {
|
||||
var host = this.host;
|
||||
var port = portPattern.exec(host);
|
||||
if (port) {
|
||||
port = port[0];
|
||||
if (port !== ':') {
|
||||
this.port = port.substr(1);
|
||||
}
|
||||
host = host.substr(0, host.length - port.length);
|
||||
}
|
||||
if (host) this.hostname = host;
|
||||
};
|
||||
|
||||
// About 1.5x faster than the two-arg version of Array#splice().
|
||||
function spliceOne(list, index) {
|
||||
for (var i = index, k = i + 1, n = list.length; k < n; i += 1, k += 1)
|
||||
list[i] = list[k];
|
||||
list.pop();
|
||||
}
|
1209
contrib/mORMot/SyNode/core_modules/node_modules/util.js
generated
vendored
Normal file
1209
contrib/mORMot/SyNode/core_modules/node_modules/util.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
19
contrib/mORMot/SyNode/core_modules/node_modules/vm.js
generated
vendored
Normal file
19
contrib/mORMot/SyNode/core_modules/node_modules/vm.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
/**
|
||||
* Fake vm
|
||||
* @module vm
|
||||
* @memberOf module:buildin
|
||||
*/
|
||||
|
||||
const {runInThisContext, loadDll} = process.binding('modules');
|
||||
/**
|
||||
* Node expects this config object:
|
||||
{
|
||||
filename: filename,
|
||||
lineOffset: 0,
|
||||
displayErrors: true
|
||||
}
|
||||
*/
|
||||
exports.runInThisContext = function(code, config){
|
||||
return runInThisContext(code, config.filename)
|
||||
}
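// Illustrative usage (a sketch, assuming the binding above):
//   exports.runInThisContext('40 + 2', {filename: 'answer.js'}); // 42, evaluated in the current global context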
|
||||
exports.runInDebugContext = function(){};
|
1
contrib/mORMot/SyNode/core_modules/node_modules/zlib.js
generated
vendored
Normal file
1
contrib/mORMot/SyNode/core_modules/node_modules/zlib.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
// fake zlib
|
255
contrib/mORMot/SyNode/core_modules/synode.js
Normal file
255
contrib/mORMot/SyNode/core_modules/synode.js
Normal file
@@ -0,0 +1,255 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
*/
|
||||
|
||||
const {coreModulesPath, runInThisContext, runInThisContextRes, _coreModulesInRes} = process.binding('modules');
|
||||
const {loadFile} = process.binding('fs');
|
||||
let Module;
|
||||
|
||||
|
||||
/**
|
||||
* @namespace process
|
||||
* @property {string} startupPath Use process.cwd() instead
|
||||
* @property {string} execPath The main executable full path (including .exe file name)
|
||||
*/
|
||||
|
||||
function startup() {
|
||||
/**
|
||||
* Current working directory
|
||||
* @return {string|String}
|
||||
*/
|
||||
process.cwd = function () {
|
||||
return process.startupPath;
|
||||
};
|
||||
/**
|
||||
* List of loaded via `require` modules
|
||||
* @private
|
||||
* @type {Array<string>}
|
||||
*/
|
||||
process.moduleLoadList = [];
|
||||
|
||||
Module = NativeModule.require('module');
|
||||
Module.call(global, ['.']);
|
||||
process.mainModule = global;
|
||||
|
||||
//noinspection JSUndeclaredVariable
|
||||
/**
|
||||
* Load a module. Acts like a <a href="http://nodejs.org/api/modules.html">Node JS</a> require, with 1 difference:
|
||||
*
|
||||
* - if we run in production mode (`!process.isDebug`) and a minimized version of the main module exists, it will be loaded.
|
||||
* By "minimized version" we mean package.json `main` entry with `.min.js` extension <br>
|
||||
*
|
||||
* *In case you need to debug from there module is loaded set OS Environment variable*
|
||||
* `>SET NODE_DEBUG=modules` *and restart server - require will put to debug log all information about how module are loaded.* Do not do this on production, of course :)
|
||||
*
|
||||
* @global
|
||||
* @method
|
||||
* @param {String} moduleName
|
||||
* @returns {*}
|
||||
*/
|
||||
global.require = Module.prototype.require;
|
||||
global.Buffer = NativeModule.require('buffer').Buffer;
|
||||
//global.clearTimeout = function() {};
|
||||
|
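An illustrative sketch of application code using the globals wired up above; 'some-package' is a hypothetical module name, and the NODE_DEBUG hint repeats the JSDoc note.

const path = require('path');                 // core module resolved from node_modules
const isBuf = Buffer.isBuffer(new Buffer(4)); // global Buffer installed above; isBuf === true
// const pkg = require('some-package');       // hypothetical package; in production
//                                            // (!process.isDebug) its .min.js "main" is preferred
// Set the environment variable NODE_DEBUG=modules before starting the server
// to log how each module is resolved.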
  /**
   * Block the thread for the specified number of milliseconds
   * @param {Number} ms milliseconds to sleep
   * @global
   */
  global.sleep = process.binding('syNode').sleep;

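A small illustrative sketch of the blocking sleep global defined above; nothing else runs on the thread while it waits.

var t0 = Date.now();
sleep(50);                      // blocks the whole thread for about 50 ms
var elapsed = Date.now() - t0;  // roughly 50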
  const EventEmitter = NativeModule.require('events').EventEmitter;
  // add EventEmitter to the process object
  EventEmitter.call(process);
  Object.assign(process, EventEmitter.prototype);

  const WindowTimer = NativeModule.require('polyfill/WindowTimer');
  global._timerLoop = WindowTimer.makeWindowTimer(global, function (ms) { global.sleep(ms); });
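Because EventEmitter.prototype is mixed into process above, ordinary event wiring works on it. An illustrative sketch ('config-reloaded' is a hypothetical event name):

process.on('config-reloaded', function (payload) {
  // react to the event; payload.ok === true here
});
process.emit('config-reloaded', {ok: true});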
  /**
   * This function is just to be compatible with node.js
   * @param {Function} callback Callback (called immediately in SyNode)
   */
  process.nextTick = function(callback, arg1, arg2, arg3) {
    if (typeof callback !== 'function') {
      throw new TypeError('"callback" argument must be a function');
    }
    // on the way out, don't bother. it won't get fired anyway.
    if (process._exiting)
      return;

    var i, args;

    switch (arguments.length) {
      // fast cases
      case 1:
        break;
      case 2:
        args = [arg1];
        break;
      case 3:
        args = [arg1, arg2];
        break;
      default:
        args = [arg1, arg2, arg3];
        for (i = 4; i < arguments.length; i++)
          args[i - 1] = arguments[i];
        break;
    }
    global._timerLoop.setTimeoutWithPriority.apply(undefined, [callback, 0, -1].concat(args));
  };
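An illustrative sketch of the nextTick shim above: extra arguments are forwarded to the callback, and the call is queued on the internal timer loop (priority -1) rather than executed synchronously.

process.nextTick(function (a, b) {
  // runs on the next turn of _timerLoop with a === 1 and b === 2
}, 1, 2);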

  /**
   * This function is to be compatible with node.js
   * @global
   * @param {Function} callback
   * @param {...*} arg
   * @return {Number} immediateId
   */
  global.setImmediate = function(callback, arg1, arg2, arg3) {
    if (typeof callback !== 'function') {
      throw new TypeError('"callback" argument must be a function');
    }
    // on the way out, don't bother. it won't get fired anyway.
    if (process._exiting)
      return;

    var i, args;

    switch (arguments.length) {
      // fast cases
      case 1:
        break;
      case 2:
        args = [arg1];
        break;
      case 3:
        args = [arg1, arg2];
        break;
      default:
        args = [arg1, arg2, arg3];
        for (i = 4; i < arguments.length; i++)
          args[i - 1] = arguments[i];
        break;
    }
    global._timerLoop.setTimeoutWithPriority.apply(undefined, [callback, 0, 1].concat(args));
  };
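A sketch contrasting the two shims above: both queue through _timerLoop.setTimeoutWithPriority with a 0 ms delay, but nextTick uses priority -1 and setImmediate uses +1, so nextTick callbacks are expected to run first.

var order = [];
process.nextTick(function () { order.push('tick'); });
setImmediate(function () { order.push('immediate'); });
// once the timer loop drains, order should be ['tick', 'immediate']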

}


function NativeModule(id) {
  this.filename = id + '.js';
  this.id = id;
  this.exports = {};
  this.loaded = false;
}

const NODE_CORE_MODULES = ['fs', 'util', 'path', 'assert', 'module', 'console', 'events', 'vm',
  'net', 'os', 'punycode', 'querystring', 'timers', 'tty', 'url', 'child_process', 'http', 'https',
  'crypto', 'zlib', 'dns', // fake modules
  'buffer', 'string_decoder', 'internal/util', 'internal/module', 'stream', '_stream_readable', '_stream_writable',
  'internal/streams/BufferList', '_stream_duplex', '_stream_transform', '_stream_passthrough',
  'internal/fs',
  'internal/errors', 'internal/querystring',
  'polyfill/WindowTimer'];

NativeModule._source = {};
const PATH_DELIM = process.platform === 'win32' ? '\\' : '/';
NODE_CORE_MODULES.forEach((module_name) => {
  NativeModule._source[module_name] = _coreModulesInRes
    ? `node_modules/${module_name}.js`.toUpperCase()
    : `${coreModulesPath}${PATH_DELIM}node_modules${PATH_DELIM}${module_name}.js`;
});
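An illustrative trace of the mapping built above (the Windows path is an assumed example value for coreModulesPath):

var fsSource = NativeModule._source['fs'];
// _coreModulesInRes === true  -> 'NODE_MODULES/FS.JS' (upper-cased resource name)
// _coreModulesInRes === false -> e.g. 'C:\\SyNode\\core_modules\\node_modules\\fs.js' on Windows,
//                                or '<coreModulesPath>/node_modules/fs.js' elsewhere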

NativeModule._cache = {};

NativeModule.require = function (id) {
  if (id == 'native_module') {
    return NativeModule;
  }

  var cached = NativeModule.getCached(id);
  if (cached) {
    return cached.exports;
  }

  if (!NativeModule.exists(id)) {
    throw new Error('No such native module ' + id);
  }

  process.moduleLoadList.push('NativeModule ' + id);

  var nativeModule = new NativeModule(id);

  nativeModule.cache();
  nativeModule.compile();

  return nativeModule.exports;
};
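An illustrative sketch of the caching behaviour of NativeModule.require above: a repeated call returns the exports object produced by the first call.

var events1 = NativeModule.require('events');
var events2 = NativeModule.require('events');
// events1 === events2, and process.moduleLoadList contains 'NativeModule events' once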

NativeModule.getCached = function (id) {
  if (NativeModule._cache.hasOwnProperty(id)) {
    return NativeModule._cache[id];
  } else {
    return null;
  }
};

NativeModule.exists = function (id) {
  return NativeModule._source.hasOwnProperty(id);
};

const EXPOSE_INTERNALS = false;
/* MPV
const EXPOSE_INTERNALS = process.execArgv.some(function(arg) {
  return arg.match(/^--expose[-_]internals$/);
});
*/
if (EXPOSE_INTERNALS) {
  NativeModule.nonInternalExists = NativeModule.exists;

  NativeModule.isInternal = function(id) {
    return false;
  };
} else {
  NativeModule.nonInternalExists = function(id) {
    return NativeModule.exists(id) && !NativeModule.isInternal(id);
  };

  NativeModule.isInternal = function(id) {
    return id.startsWith('internal/');
  };
}
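With EXPOSE_INTERNALS hard-coded to false, the else branch above applies; an illustrative check:

NativeModule.nonInternalExists('fs');          // true  - public core module
NativeModule.nonInternalExists('internal/fs'); // false - exists, but under internal/
NativeModule.isInternal('internal/errors');    // true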

NativeModule.getSource = function (id) {
  return loadFile(NativeModule._source[id]);
};

NativeModule.wrap = function (script) {
  return NativeModule.wrapper[0] + script + NativeModule.wrapper[1];
};

NativeModule.wrapper = [
  '(function (exports, require, module, __filename, __dirname) { ', '\n});'
];
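A sketch of what NativeModule.wrap produces for a trivial module body - the same function wrapper Node.js uses, so exports, require, module, __filename and __dirname arrive as parameters:

var wrapped = NativeModule.wrap('exports.answer = 42;');
// wrapped === '(function (exports, require, module, __filename, __dirname) { ' +
//             'exports.answer = 42;\n});'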

NativeModule.prototype.compile = function () {
  let fn;
  if (_coreModulesInRes) {
    // core module sources are embedded in the executable's resources
    fn = runInThisContextRes(NativeModule._source[this.id], this.filename, true);
  } else {
    // core module sources are plain files under coreModulesPath
    let source = NativeModule.getSource(this.id);
    source = NativeModule.wrap(source);
    fn = runInThisContext(source, this.filename, true);
  }
  fn(this.exports, NativeModule.require, this, this.filename);
  this.loaded = true;
};

NativeModule.prototype.cache = function () {
  NativeModule._cache[this.id] = this;
};

startup();
///patch ModuleLoader