
updated a bunch of file paths and changed the way posts are loaded

2016-01-05 12:28:04 -06:00
parent 719ae331ae
commit c96a84d0ff
13249 changed files with 317868 additions and 2101398 deletions


@@ -1,59 +1,86 @@
 {
-  "name": "async",
-  "description": "Higher-order functions and common patterns for asynchronous code",
-  "main": "./lib/async",
+  "_args": [
+    [
+      "async@0.9.0",
+      "/home/mitchell/Desktop/test-mywebsite/mywebsite/node_modules/mongoose"
+    ]
+  ],
+  "_from": "async@0.9.0",
+  "_id": "async@0.9.0",
+  "_inCache": true,
+  "_installable": true,
+  "_location": "/mongoose/async",
+  "_npmUser": {
+    "email": "caolan.mcmahon@gmail.com",
+    "name": "caolan"
+  },
+  "_npmVersion": "1.4.3",
+  "_phantomChildren": {},
+  "_requested": {
+    "name": "async",
+    "raw": "async@0.9.0",
+    "rawSpec": "0.9.0",
+    "scope": null,
+    "spec": "0.9.0",
+    "type": "version"
+  },
+  "_requiredBy": [
+    "/mongoose"
+  ],
+  "_resolved": "https://registry.npmjs.org/async/-/async-0.9.0.tgz",
+  "_shasum": "ac3613b1da9bed1b47510bb4651b8931e47146c7",
+  "_shrinkwrap": null,
+  "_spec": "async@0.9.0",
+  "_where": "/home/mitchell/Desktop/test-mywebsite/mywebsite/node_modules/mongoose",
   "author": {
     "name": "Caolan McMahon"
   },
-  "version": "0.9.0",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/caolan/async.git"
-  },
   "bugs": {
     "url": "https://github.com/caolan/async/issues"
   },
+  "dependencies": {},
+  "description": "Higher-order functions and common patterns for asynchronous code",
+  "devDependencies": {
+    "nodelint": ">0.0.0",
+    "nodeunit": ">0.0.0",
+    "uglify-js": "1.2.x"
+  },
+  "directories": {},
+  "dist": {
+    "shasum": "ac3613b1da9bed1b47510bb4651b8931e47146c7",
+    "tarball": "http://registry.npmjs.org/async/-/async-0.9.0.tgz"
+  },
+  "homepage": "https://github.com/caolan/async",
+  "jam": {
+    "include": [
+      "LICENSE",
+      "README.md",
+      "lib/async.js"
+    ],
+    "main": "lib/async.js"
+  },
   "licenses": [
     {
       "type": "MIT",
       "url": "https://github.com/caolan/async/raw/master/LICENSE"
     }
   ],
-  "devDependencies": {
-    "nodeunit": ">0.0.0",
-    "uglify-js": "1.2.x",
-    "nodelint": ">0.0.0"
-  },
-  "jam": {
-    "main": "lib/async.js",
-    "include": [
-      "lib/async.js",
-      "README.md",
-      "LICENSE"
-    ]
-  },
-  "scripts": {
-    "test": "nodeunit test/test-async.js"
-  },
-  "homepage": "https://github.com/caolan/async",
-  "_id": "async@0.9.0",
-  "dist": {
-    "shasum": "ac3613b1da9bed1b47510bb4651b8931e47146c7",
-    "tarball": "http://registry.npmjs.org/async/-/async-0.9.0.tgz"
-  },
-  "_from": "async@0.9.0",
-  "_npmVersion": "1.4.3",
-  "_npmUser": {
-    "name": "caolan",
-    "email": "caolan.mcmahon@gmail.com"
-  },
+  "main": "./lib/async",
   "maintainers": [
     {
       "name": "caolan",
       "email": "caolan@caolanmcmahon.com"
     }
   ],
-  "directories": {},
-  "_shasum": "ac3613b1da9bed1b47510bb4651b8931e47146c7",
-  "_resolved": "https://registry.npmjs.org/async/-/async-0.9.0.tgz"
+  "name": "async",
+  "optionalDependencies": {},
+  "readme": "ERROR: No README data found!",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/caolan/async.git"
+  },
+  "scripts": {
+    "test": "nodeunit test/test-async.js"
+  },
+  "version": "0.9.0"
 }


@@ -1,4 +0,0 @@
language: node_js
node_js:
- 0.10 # development version of 0.8, may be unstable
- 0.12


@@ -1,3 +1,87 @@
0.4.19 2015-10-15
-----------------
- Remove all support for bson-ext.
0.4.18 2015-10-15
-----------------
- ObjectID equality check should return boolean instead of throwing exception for invalid oid string #139
- add option for deserializing binary into Buffer object #116
0.4.17 2015-10-15
-----------------
- Validate regexp string for null bytes and throw if there is one.
0.4.16 2015-10-07
-----------------
- Fixed issue with return statement in Map.js.
0.4.15 2015-10-06
-----------------
- Exposed Map correctly via index.js file.
0.4.14 2015-10-06
-----------------
- Exposed Map correctly via bson.js file.
0.4.13 2015-10-06
-----------------
- Added ES6 Map type serialization as well as a polyfill for ES5.
0.4.12 2015-09-18
-----------------
- Made ignore undefined an optional parameter.
0.4.11 2015-08-06
-----------------
- Minor fix for invalid key checking.
0.4.10 2015-08-06
-----------------
- NODE-38 Added new BSONRegExp type to allow direct serialization to MongoDB type.
- Some performance improvements by inlining code.
0.4.9 2015-08-06
----------------
- Undefined fields are omitted from serialization in objects.
0.4.8 2015-07-14
----------------
- Fixed size validation to ensure we can deserialize from dumped files.
0.4.7 2015-06-26
----------------
- Added ability to instruct deserializer to return raw BSON buffers for named array fields.
- Minor deserialization optimization by moving inlined function out.
0.4.6 2015-06-17
----------------
- Fixed serializeWithBufferAndIndex bug.
0.4.5 2015-06-17
----------------
- Removed any references to the shared buffer to avoid non GC collectible bson instances.
0.4.4 2015-06-17
----------------
- Fixed rethrowing of error when not RangeError.
0.4.3 2015-06-17
----------------
- Start buffer at 64K and double as needed, meaning we keep a low memory profile until needed.
0.4.2 2015-06-16
----------------
- More fixes for corrupt Bson
0.4.1 2015-06-16
----------------
- More fixes for corrupt Bson
0.4.0 2015-06-16
----------------
- New JS serializer serializing into a single buffer then copying out the new buffer. Performance is similar to current C++ parser.
- Removed bson-ext extension dependency for now.
0.3.2 2015-03-27
----------------
- Removed node-gyp from install script in package.json.
@@ -39,4 +123,4 @@
0.1.4 2012-09-25
----------------
- Added precompiled c++ native extensions for win32 ia32 and x64
- Added precompiled c++ native extensions for win32 ia32 and x64
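One illustrative sketch of the 0.4.13 entry above (ES6 Map serialization plus an ES5 polyfill), assuming this vendored bson resolves via require('bson') and exposes Map on BSONPure the way its index.js suggests:

```javascript
var bson = require('bson');
var BSON = new bson.BSONPure.BSON();
var Map = bson.BSONPure.Map; // native Map on ES6 engines, the polyfill otherwise

// Maps serialize with insertion order preserved, unlike plain objects
// whose key order is engine-defined.
var doc = new Map([['b', 2], ['a', 1]]);
var bytes = BSON.serialize(doc, false, true, false);
console.log(BSON.deserialize(bytes)); // { b: 2, a: 1 }
```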


@@ -35,7 +35,7 @@ A simple example of how to use BSON in `node.js`:
```javascript
var bson = require("bson");
var BSON = bson.BSONPure.BSON;
var BSON = new bson.BSONPure.BSON();
var Long = bson.BSONPure.Long;
var doc = {long: Long.fromNumber(100)}
```
@@ -63,6 +63,7 @@ The API consists of two simple methods to serialize/deserialize objects to/from
* **evalFunctions** {Boolean, default:false}, evaluate functions in the BSON document scoped to the object deserialized.
* **cacheFunctions** {Boolean, default:false}, cache evaluated functions for reuse.
* **cacheFunctionsCrc32** {Boolean, default:false}, use a crc32 code for caching, otherwise use the string of the function.
* **promoteBuffers** {Boolean, default:false}, deserialize Binary data directly into node.js Buffer object.
* @param {TypedArray/Array} a TypedArray/Array containing the BSON data
* @param {Object} [options] additional options used for the deserialization.
* @param {Boolean} [isArray] ignore used for recursive parsing.
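A hedged sketch of the new promoteBuffers flag documented above, reusing the README's BSONPure setup:

```javascript
var bson = require('bson');
var BSON = new bson.BSONPure.BSON();
var Binary = bson.BSONPure.Binary;

var bytes = BSON.serialize({data: new Binary(new Buffer('hello'))}, false, true, false);
var doc = BSON.deserialize(bytes, {promoteBuffers: true});
console.log(Buffer.isBuffer(doc.data)); // true: a plain Buffer, no Binary wrapper
```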


@@ -2,13 +2,13 @@
* Module dependencies.
* @ignore
*/
if(typeof window === 'undefined') {
if(typeof window === 'undefined') {
var Buffer = require('buffer').Buffer; // TODO just use global Buffer
}
/**
* A class representation of the BSON Binary type.
*
*
* Sub types
* - **BSON.BSON_BINARY_SUBTYPE_DEFAULT**, default BSON type.
* - **BSON.BSON_BINARY_SUBTYPE_FUNCTION**, BSON function type.
@@ -24,13 +24,13 @@ if(typeof window === 'undefined') {
*/
function Binary(buffer, subType) {
if(!(this instanceof Binary)) return new Binary(buffer, subType);
this._bsontype = 'Binary';
if(buffer instanceof Number) {
this.sub_type = buffer;
this.position = 0;
} else {
} else {
this.sub_type = subType == null ? BSON_BINARY_SUBTYPE_DEFAULT : subType;
this.position = 0;
}
@@ -47,12 +47,12 @@ function Binary(buffer, subType) {
throw new Error("only String, Buffer, Uint8Array or Array accepted");
}
} else {
this.buffer = buffer;
this.buffer = buffer;
}
this.position = buffer.length;
} else {
if(typeof Buffer != 'undefined') {
this.buffer = new Buffer(Binary.BUFFER_SIZE);
this.buffer = new Buffer(Binary.BUFFER_SIZE);
} else if(typeof Uint8Array != 'undefined'){
this.buffer = new Uint8Array(new ArrayBuffer(Binary.BUFFER_SIZE));
} else {
@@ -73,21 +73,21 @@ Binary.prototype.put = function put(byte_value) {
// If it's a string and a has more than one character throw an error
if(byte_value['length'] != null && typeof byte_value != 'number' && byte_value.length != 1) throw new Error("only accepts single character String, Uint8Array or Array");
if(typeof byte_value != 'number' && byte_value < 0 || byte_value > 255) throw new Error("only accepts number in a valid unsigned byte range 0-255");
// Decode the byte value once
var decoded_byte = null;
if(typeof byte_value == 'string') {
decoded_byte = byte_value.charCodeAt(0);
decoded_byte = byte_value.charCodeAt(0);
} else if(byte_value['length'] != null) {
decoded_byte = byte_value[0];
} else {
decoded_byte = byte_value;
}
if(this.buffer.length > this.position) {
this.buffer[this.position++] = decoded_byte;
} else {
if(typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer)) {
if(typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer)) {
// Create additional overflow buffer
var buffer = new Buffer(Binary.BUFFER_SIZE + this.buffer.length);
// Combine the two buffers together
@@ -101,13 +101,13 @@ Binary.prototype.put = function put(byte_value) {
buffer = new Uint8Array(new ArrayBuffer(Binary.BUFFER_SIZE + this.buffer.length));
} else {
buffer = new Array(Binary.BUFFER_SIZE + this.buffer.length);
}
}
// We need to copy all the content to the new array
for(var i = 0; i < this.buffer.length; i++) {
buffer[i] = this.buffer[i];
}
// Reassign the buffer
this.buffer = buffer;
// Write the byte
@@ -131,9 +131,9 @@ Binary.prototype.write = function write(string, offset) {
if(this.buffer.length < offset + string.length) {
var buffer = null;
// If we are in node.js
if(typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer)) {
if(typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer)) {
buffer = new Buffer(this.buffer.length + string.length);
this.buffer.copy(buffer, 0, 0, this.buffer.length);
this.buffer.copy(buffer, 0, 0, this.buffer.length);
} else if(Object.prototype.toString.call(this.buffer) == '[object Uint8Array]') {
// Create a new buffer
buffer = new Uint8Array(new ArrayBuffer(this.buffer.length + string.length))
@@ -142,7 +142,7 @@ Binary.prototype.write = function write(string, offset) {
buffer[i] = this.buffer[i];
}
}
// Assign the new buffer
this.buffer = buffer;
}
@@ -152,14 +152,14 @@ Binary.prototype.write = function write(string, offset) {
this.position = (offset + string.length) > this.position ? (offset + string.length) : this.position;
// offset = string.length
} else if(typeof Buffer != 'undefined' && typeof string == 'string' && Buffer.isBuffer(this.buffer)) {
this.buffer.write(string, 'binary', offset);
this.buffer.write(string, offset, 'binary');
this.position = (offset + string.length) > this.position ? (offset + string.length) : this.position;
// offset = string.length;
} else if(Object.prototype.toString.call(string) == '[object Uint8Array]'
|| Object.prototype.toString.call(string) == '[object Array]' && typeof string != 'string') {
} else if(Object.prototype.toString.call(string) == '[object Uint8Array]'
|| Object.prototype.toString.call(string) == '[object Array]' && typeof string != 'string') {
for(var i = 0; i < string.length; i++) {
this.buffer[offset++] = string[i];
}
}
this.position = offset > this.position ? offset : this.position;
} else if(typeof string == 'string') {
@@ -183,7 +183,7 @@ Binary.prototype.read = function read(position, length) {
length = length && length > 0
? length
: this.position;
// Let's return the data based on the type we have
if(this.buffer['slice']) {
return this.buffer.slice(position, position + length);
@@ -205,12 +205,12 @@ Binary.prototype.read = function read(position, length) {
* @return {string}
*/
Binary.prototype.value = function value(asRaw) {
asRaw = asRaw == null ? false : asRaw;
asRaw = asRaw == null ? false : asRaw;
// Optimize to serialize for the situation where the data == size of buffer
if(asRaw && typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer) && this.buffer.length == this.position)
return this.buffer;
// If it's a node.js buffer object
if(typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer)) {
return asRaw ? this.buffer.slice(0, this.position) : this.buffer.toString('binary', 0, this.position);
@@ -261,7 +261,7 @@ Binary.prototype.toString = function(format) {
/**
* Binary default subtype
* @ignore
* @ignore
*/
var BSON_BINARY_SUBTYPE_DEFAULT = 0;
@@ -274,7 +274,7 @@ var writeStringToArray = function(data) {
// Write the content to the buffer
for(var i = 0; i < data.length; i++) {
buffer[i] = data.charCodeAt(i);
}
}
// Write the string to the buffer
return buffer;
}
@@ -289,50 +289,50 @@ var convertArraytoUtf8BinaryString = function(byteArray, startIndex, endIndex) {
for(var i = startIndex; i < endIndex; i++) {
result = result + String.fromCharCode(byteArray[i]);
}
return result;
return result;
};
Binary.BUFFER_SIZE = 256;
/**
* Default BSON type
*
*
* @classconstant SUBTYPE_DEFAULT
**/
Binary.SUBTYPE_DEFAULT = 0;
/**
* Function BSON type
*
*
* @classconstant SUBTYPE_DEFAULT
**/
Binary.SUBTYPE_FUNCTION = 1;
/**
* Byte Array BSON type
*
*
* @classconstant SUBTYPE_DEFAULT
**/
Binary.SUBTYPE_BYTE_ARRAY = 2;
/**
* OLD UUID BSON type
*
*
* @classconstant SUBTYPE_DEFAULT
**/
Binary.SUBTYPE_UUID_OLD = 3;
/**
* UUID BSON type
*
*
* @classconstant SUBTYPE_DEFAULT
**/
Binary.SUBTYPE_UUID = 4;
/**
* MD5 BSON type
*
*
* @classconstant SUBTYPE_DEFAULT
**/
Binary.SUBTYPE_MD5 = 5;
/**
* User BSON type
*
*
* @classconstant SUBTYPE_DEFAULT
**/
Binary.SUBTYPE_USER_DEFINED = 128;
@@ -341,4 +341,4 @@ Binary.SUBTYPE_USER_DEFINED = 128;
* Expose.
*/
module.exports = Binary;
module.exports.Binary = Binary;
module.exports.Binary = Binary;
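A quick usage sketch for the Binary class above; note this hunk's corrected Buffer#write argument order (string, offset, encoding). The require path is an assumption:

```javascript
var Binary = require('bson').BSONPure.Binary;

var bin = new Binary();
bin.write('hello');        // appends at the current position
bin.write('!', 5);         // writes at an explicit offset
console.log(bin.length()); // 6
console.log(bin.read(0, 6).toString()); // 'hello!'
```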

File diff suppressed because it is too large


@@ -1,776 +0,0 @@
var writeIEEE754 = require('./float_parser').writeIEEE754
, Long = require('./long').Long
, Double = require('./double').Double
, Timestamp = require('./timestamp').Timestamp
, ObjectID = require('./objectid').ObjectID
, Symbol = require('./symbol').Symbol
, Code = require('./code').Code
, MinKey = require('./min_key').MinKey
, MaxKey = require('./max_key').MaxKey
, DBRef = require('./db_ref').DBRef
, Binary = require('./binary').Binary
, BinaryParser = require('./binary_parser').BinaryParser;
// Max Document Buffer size
var buffer = new Buffer(1024 * 1024 * 16);
var checkKey = function checkKey (key, dollarsAndDotsOk) {
if (!key.length) return;
// Check if we have a legal key for the object
if (!!~key.indexOf("\x00")) {
// The BSON spec doesn't allow keys with null bytes because keys are
// null-terminated.
throw Error("key " + key + " must not contain null bytes");
}
if (!dollarsAndDotsOk) {
if('$' == key[0]) {
throw Error("key " + key + " must not start with '$'");
} else if (!!~key.indexOf('.')) {
throw Error("key " + key + " must not contain '.'");
}
}
};
var serializeString = function(key, value, index) {
// Encode String type
buffer[index++] = BSON.BSON_DATA_STRING;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes + 1;
buffer[index - 1] = 0;
// Calculate size
var size = Buffer.byteLength(value) + 1;
// Write the size of the string to buffer
buffer[index + 3] = (size >> 24) & 0xff;
buffer[index + 2] = (size >> 16) & 0xff;
buffer[index + 1] = (size >> 8) & 0xff;
buffer[index] = size & 0xff;
// Adjust the index
index = index + 4;
// Write the string
buffer.write(value, index, 'utf8');
// Update index
index = index + size - 1;
// Write zero
buffer[index++] = 0;
return index;
}
var serializeNumber = function(key, value, index) {
// We have an integer value
if(Math.floor(value) === value && value >= BSON.JS_INT_MIN && value <= BSON.JS_INT_MAX) {
// If the value fits in 32 bits encode as int, if it fits in a double
// encode it as a double, otherwise long
if(value >= BSON.BSON_INT32_MIN && value <= BSON.BSON_INT32_MAX) {
// Set int type 32 bits or less
buffer[index++] = BSON.BSON_DATA_INT;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Write the int value
buffer[index++] = value & 0xff;
buffer[index++] = (value >> 8) & 0xff;
buffer[index++] = (value >> 16) & 0xff;
buffer[index++] = (value >> 24) & 0xff;
} else if(value >= BSON.JS_INT_MIN && value <= BSON.JS_INT_MAX) {
// Encode as double
buffer[index++] = BSON.BSON_DATA_NUMBER;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Write float
writeIEEE754(buffer, value, index, 'little', 52, 8);
// Adjust index
index = index + 8;
} else {
// Set long type
buffer[index++] = BSON.BSON_DATA_LONG;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
var longVal = Long.fromNumber(value);
var lowBits = longVal.getLowBits();
var highBits = longVal.getHighBits();
// Encode low bits
buffer[index++] = lowBits & 0xff;
buffer[index++] = (lowBits >> 8) & 0xff;
buffer[index++] = (lowBits >> 16) & 0xff;
buffer[index++] = (lowBits >> 24) & 0xff;
// Encode high bits
buffer[index++] = highBits & 0xff;
buffer[index++] = (highBits >> 8) & 0xff;
buffer[index++] = (highBits >> 16) & 0xff;
buffer[index++] = (highBits >> 24) & 0xff;
}
} else {
// Encode as double
buffer[index++] = BSON.BSON_DATA_NUMBER;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Write float
writeIEEE754(buffer, value, index, 'little', 52, 8);
// Adjust index
index = index + 8;
}
return index;
}
var serializeUndefined = function(key, value, index) {
// Set long type
buffer[index++] = BSON.BSON_DATA_NULL;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
return index;
}
var serializeBoolean = function(key, value, index) {
// Write the type
buffer[index++] = BSON.BSON_DATA_BOOLEAN;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Encode the boolean value
buffer[index++] = value ? 1 : 0;
return index;
}
var serializeDate = function(key, value, index) {
// Write the type
buffer[index++] = BSON.BSON_DATA_DATE;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Write the date
var dateInMilis = Long.fromNumber(value.getTime());
var lowBits = dateInMilis.getLowBits();
var highBits = dateInMilis.getHighBits();
// Encode low bits
buffer[index++] = lowBits & 0xff;
buffer[index++] = (lowBits >> 8) & 0xff;
buffer[index++] = (lowBits >> 16) & 0xff;
buffer[index++] = (lowBits >> 24) & 0xff;
// Encode high bits
buffer[index++] = highBits & 0xff;
buffer[index++] = (highBits >> 8) & 0xff;
buffer[index++] = (highBits >> 16) & 0xff;
buffer[index++] = (highBits >> 24) & 0xff;
return index;
}
var serializeRegExp = function(key, value, index) {
// Write the type
buffer[index++] = BSON.BSON_DATA_REGEXP;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Write the regular expression string
buffer.write(value.source, index, 'utf8');
// Adjust the index
index = index + Buffer.byteLength(value.source);
// Write zero
buffer[index++] = 0x00;
// Write the parameters
if(value.global) buffer[index++] = 0x73; // s
if(value.ignoreCase) buffer[index++] = 0x69; // i
if(value.multiline) buffer[index++] = 0x6d; // m
// Add ending zero
buffer[index++] = 0x00;
return index;
}
var serializeMinMax = function(key, value, index) {
// Write the type of either min or max key
if(value === null) {
buffer[index++] = BSON.BSON_DATA_NULL;
} else if(value instanceof MinKey) {
buffer[index++] = BSON.BSON_DATA_MIN_KEY;
} else {
buffer[index++] = BSON.BSON_DATA_MAX_KEY;
}
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
return index;
}
var serializeObjectId = function(key, value, index) {
// Write the type
buffer[index++] = BSON.BSON_DATA_OID;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
for(var j = 0; j < 12; j++) {
buffer[index + j] = value.binId[j];
}
// Adjust index
index = index + 12;
return index;
}
var serializeBuffer = function(key, value, index) {
// Write the type
buffer[index++] = BSON.BSON_DATA_BINARY;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Get size of the buffer (current write point)
var size = value.length;
// Write the size of the string to buffer
buffer[index++] = size & 0xff;
buffer[index++] = (size >> 8) & 0xff;
buffer[index++] = (size >> 16) & 0xff;
buffer[index++] = (size >> 24) & 0xff;
// Write the default subtype
buffer[index++] = BSON.BSON_BINARY_SUBTYPE_DEFAULT;
// Copy the content from the binary field to the buffer
value.copy(buffer, index, 0, size);
// Adjust the index
index = index + size;
return index;
}
var serializeObject = function(key, value, index, checkKeys, depth) {
// Write the type
buffer[index++] = Array.isArray(value) ? BSON.BSON_DATA_ARRAY : BSON.BSON_DATA_OBJECT;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
var endIndex = serializeInto(value, checkKeys, index, depth + 1);
// Write size
var size = endIndex - index;
return endIndex;
}
var serializeLong = function(key, value, index) {
// Write the type
buffer[index++] = value instanceof Long ? BSON.BSON_DATA_LONG : BSON.BSON_DATA_TIMESTAMP;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Write the date
var lowBits = value.getLowBits();
var highBits = value.getHighBits();
// Encode low bits
buffer[index++] = lowBits & 0xff;
buffer[index++] = (lowBits >> 8) & 0xff;
buffer[index++] = (lowBits >> 16) & 0xff;
buffer[index++] = (lowBits >> 24) & 0xff;
// Encode high bits
buffer[index++] = highBits & 0xff;
buffer[index++] = (highBits >> 8) & 0xff;
buffer[index++] = (highBits >> 16) & 0xff;
buffer[index++] = (highBits >> 24) & 0xff;
return index;
}
var serializeDouble = function(key, value, index) {
// Encode as double
buffer[index++] = BSON.BSON_DATA_NUMBER;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Write float
writeIEEE754(buffer, value, index, 'little', 52, 8);
// Adjust index
index = index + 8;
return index;
}
var serializeCode = function(key, value, index, checkKeys, depth) {
if(value.scope != null && Object.keys(value.scope).length > 0) {
// Write the type
buffer[index++] = BSON.BSON_DATA_CODE_W_SCOPE;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Starting index
var startIndex = index;
// Serialize the function
// Get the function string
var functionString = typeof value.code == 'string' ? value.code : value.code.toString();
var codeSize = Buffer.byteLength(functionString) + 1;
// Index adjustment
index = index + 4;
// Write the size of the string to buffer
buffer[index] = codeSize & 0xff;
buffer[index + 1] = (codeSize >> 8) & 0xff;
buffer[index + 2] = (codeSize >> 16) & 0xff;
buffer[index + 3] = (codeSize >> 24) & 0xff;
// Write string into buffer
buffer.write(functionString, index + 4, 'utf8');
// Write end 0
buffer[index + 4 + codeSize - 1] = 0;
// Move the index past the code string
index = index + codeSize + 4;
//
// Serialize the scope value
var endIndex = serializeInto(value.scope, checkKeys, index, depth + 1)
index = endIndex - 1;
// Compute the total size
var totalSize = endIndex - startIndex;
// Write the total size of the object
buffer[startIndex++] = totalSize & 0xff;
buffer[startIndex++] = (totalSize >> 8) & 0xff;
buffer[startIndex++] = (totalSize >> 16) & 0xff;
buffer[startIndex++] = (totalSize >> 24) & 0xff;
// Write trailing zero
buffer[index++] = 0;
} else {
buffer[index++] = BSON.BSON_DATA_CODE;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Function string
var functionString = value.code.toString();
// Function Size
var size = Buffer.byteLength(functionString) + 1;
// Write the size of the string to buffer
buffer[index++] = size & 0xff;
buffer[index++] = (size >> 8) & 0xff;
buffer[index++] = (size >> 16) & 0xff;
buffer[index++] = (size >> 24) & 0xff;
// Write the string
buffer.write(functionString, index, 'utf8');
// Update index
index = index + size - 1;
// Write zero
buffer[index++] = 0;
}
return index;
}
var serializeBinary = function(key, value, index) {
// Write the type
buffer[index++] = BSON.BSON_DATA_BINARY;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Extract the buffer
var data = value.value(true);
// Calculate size
var size = value.position;
// Write the size of the string to buffer
buffer[index++] = size & 0xff;
buffer[index++] = (size >> 8) & 0xff;
buffer[index++] = (size >> 16) & 0xff;
buffer[index++] = (size >> 24) & 0xff;
// Write the subtype to the buffer
buffer[index++] = value.sub_type;
// If we have binary type 2 the first 4 bytes are the size
if(value.sub_type == Binary.SUBTYPE_BYTE_ARRAY) {
buffer[index++] = size & 0xff;
buffer[index++] = (size >> 8) & 0xff;
buffer[index++] = (size >> 16) & 0xff;
buffer[index++] = (size >> 24) & 0xff;
}
// Write the data to the object
data.copy(buffer, index, 0, value.position);
// Adjust the index
index = index + value.position;
return index;
}
var serializeSymbol = function(key, value, index) {
// Write the type
buffer[index++] = BSON.BSON_DATA_SYMBOL;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Calculate size
var size = Buffer.byteLength(value.value) + 1;
// Write the size of the string to buffer
buffer[index++] = size & 0xff;
buffer[index++] = (size >> 8) & 0xff;
buffer[index++] = (size >> 16) & 0xff;
buffer[index++] = (size >> 24) & 0xff;
// Write the string
buffer.write(value.value, index, 'utf8');
// Update index
index = index + size - 1;
// Write zero
buffer[index++] = 0x00;
return index;
}
var serializeDBRef = function(key, value, index, depth) {
// Write the type
buffer[index++] = BSON.BSON_DATA_OBJECT;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
var startIndex = index;
var endIndex;
// Serialize object
if(null != value.db) {
endIndex = serializeInto({
'$ref': value.namespace
, '$id' : value.oid
, '$db' : value.db
}, false, index, depth + 1);
} else {
endIndex = serializeInto({
'$ref': value.namespace
, '$id' : value.oid
}, false, index, depth + 1);
}
// Calculate object size
var size = endIndex - startIndex;
// Write the size
buffer[startIndex++] = size & 0xff;
buffer[startIndex++] = (size >> 8) & 0xff;
buffer[startIndex++] = (size >> 16) & 0xff;
buffer[startIndex++] = (size >> 24) & 0xff;
// Set index
return endIndex;
}
var BSON = function() {
this.buffer = buffer;
}
BSON.prototype.serialize = function serialize(object, checkKeys, index) {
var finishedBuffer = new Buffer(serializeInto(object, checkKeys, index || 0, 0));
this.buffer.copy(finishedBuffer, 0, 0, finishedBuffer.length);
return finishedBuffer;
}
var serializeInto = function serializeInto(object, checkKeys, startingIndex, depth) {
startingIndex = startingIndex || 0;
// Start place to serialize into
var index = startingIndex + 4;
var self = this;
// Special case isArray
if(Array.isArray(object)) {
// Get object keys
for(var i = 0; i < object.length; i++) {
var key = "" + i;
var type = typeof object[i];
// Check the key and throw error if it's illegal
if(key != '$db' && key != '$ref' && key != '$id') {
checkKey(key, !checkKeys);
}
if(type == 'string') {
index = serializeString(key, object[i], index);
} else if(type == 'number') {
index = serializeNumber(key, object[i], index);
} else if(type == 'undefined') {
index = serializeUndefined(key, object[i], index);
} else if(type == 'boolean') {
index = serializeBoolean(key, object[i], index);
} else if(object[i] instanceof Date) {
index = serializeDate(key, object[i], index);
} else if(object[i] instanceof RegExp || Object.prototype.toString.call(object[i]) === '[object RegExp]') {
index = serializeRegExp(key, object[i], index);
} else if(object[i]['_bsontype'] == 'MinKey' || object[i]['_bsontype'] == 'MaxKey') {
index = serializeMinMax(key, object[i], index);
} else if(object[i]['_bsontype'] == 'ObjectID') {
index = serializeObjectId(key, object[i], index);
} else if(Buffer.isBuffer(object[i])) {
index = serializeBuffer(key, object[i], index);
} else if(type == 'object' && object[i]['_bsontype'] == null) {
index = serializeObject(key, object[i], index, checkKeys, depth);
} else if(object[i]['_bsontype'] == 'Long' || object[i]['_bsontype'] == 'Timestamp') {
index = serializeLong(key, object[i], index);
} else if(object[i]['_bsontype'] == 'Double') {
index = serializeDouble(key, object[i], index);
} else if(object[i]['_bsontype'] == 'Code') {
index = serializeCode(key, object[i], index, checkKeys, depth);
} else if(object[i]['_bsontype'] == 'Binary') {
index = serializeBinary(key, object[i], index);
} else if(object[i]['_bsontype'] == 'Symbol') {
index = serializeSymbol(key, object[i], index);
} else if(object[i]['_bsontype'] == 'DBRef') {
index = serializeDBRef(key, object[i], index, depth);
}
}
} else {
var keys = Object.keys(object);
for(var i = 0; i < keys.length; i++) {
var key = keys[i];
var type = typeof object[key];
// Check the key and throw error if it's illegal
if(key != '$db' && key != '$ref' && key != '$id') {
checkKey(key, !checkKeys);
}
if(type == 'string') {
index = serializeString(key, object[key], index);
} else if(type == 'number') {
index = serializeNumber(key, object[key], index);
} else if(type == 'undefined') {
index = serializeUndefined(key, object[key], index);
} else if(type == 'boolean') {
index = serializeBoolean(key, object[key], index);
} else if(object[key] instanceof Date) {
index = serializeDate(key, object[key], index);
} else if(object[key] instanceof RegExp || Object.prototype.toString.call(object[key]) === '[object RegExp]') {
index = serializeRegExp(key, object[key], index);
} else if(object[key]['_bsontype'] == 'MinKey' || object[key]['_bsontype'] == 'MaxKey') {
index = serializeMinMax(key, object[key], index);
} else if(object[key]['_bsontype'] == 'ObjectID') {
index = serializeObjectId(key, object[key], index);
} else if(Buffer.isBuffer(object[key])) {
index = serializeBuffer(key, object[key], index);
} else if(type == 'object' && object[key]['_bsontype'] == null) {
index = serializeObject(key, object[key], index, checkKeys, depth);
} else if(object[key]['_bsontype'] == 'Long' || object[key]['_bsontype'] == 'Timestamp') {
index = serializeLong(key, object[key], index);
} else if(object[key]['_bsontype'] == 'Double') {
index = serializeDouble(key, object[key], index);
} else if(object[key]['_bsontype'] == 'Code') {
index = serializeCode(key, object[key], index, checkKeys, depth);
} else if(object[key]['_bsontype'] == 'Binary') {
index = serializeBinary(key, object[key], index);
} else if(object[key]['_bsontype'] == 'Symbol') {
index = serializeSymbol(key, object[key], index);
} else if(object[key]['_bsontype'] == 'DBRef') {
index = serializeDBRef(key, object[key], index, depth);
}
}
}
// Final padding byte for object
buffer[index++] = 0x00;
// Final size
var size = index - startingIndex;
// Write the size of the object
buffer[startingIndex++] = size & 0xff;
buffer[startingIndex++] = (size >> 8) & 0xff;
buffer[startingIndex++] = (size >> 16) & 0xff;
buffer[startingIndex++] = (size >> 24) & 0xff;
return index;
}
/**
* @ignore
* @api private
*/
// BSON MAX VALUES
BSON.BSON_INT32_MAX = 0x7FFFFFFF;
BSON.BSON_INT32_MIN = -0x80000000;
BSON.BSON_INT64_MAX = Math.pow(2, 63) - 1;
BSON.BSON_INT64_MIN = -Math.pow(2, 63);
// JS MAX PRECISE VALUES
BSON.JS_INT_MAX = 0x20000000000000; // Any integer up to 2^53 can be precisely represented by a double.
BSON.JS_INT_MIN = -0x20000000000000; // Any integer down to -2^53 can be precisely represented by a double.
// Internal long versions
var JS_INT_MAX_LONG = Long.fromNumber(0x20000000000000); // Any integer up to 2^53 can be precisely represented by a double.
var JS_INT_MIN_LONG = Long.fromNumber(-0x20000000000000); // Any integer down to -2^53 can be precisely represented by a double.
/**
* Number BSON Type
*
* @classconstant BSON_DATA_NUMBER
**/
BSON.BSON_DATA_NUMBER = 1;
/**
* String BSON Type
*
* @classconstant BSON_DATA_STRING
**/
BSON.BSON_DATA_STRING = 2;
/**
* Object BSON Type
*
* @classconstant BSON_DATA_OBJECT
**/
BSON.BSON_DATA_OBJECT = 3;
/**
* Array BSON Type
*
* @classconstant BSON_DATA_ARRAY
**/
BSON.BSON_DATA_ARRAY = 4;
/**
* Binary BSON Type
*
* @classconstant BSON_DATA_BINARY
**/
BSON.BSON_DATA_BINARY = 5;
/**
* ObjectID BSON Type
*
* @classconstant BSON_DATA_OID
**/
BSON.BSON_DATA_OID = 7;
/**
* Boolean BSON Type
*
* @classconstant BSON_DATA_BOOLEAN
**/
BSON.BSON_DATA_BOOLEAN = 8;
/**
* Date BSON Type
*
* @classconstant BSON_DATA_DATE
**/
BSON.BSON_DATA_DATE = 9;
/**
* null BSON Type
*
* @classconstant BSON_DATA_NULL
**/
BSON.BSON_DATA_NULL = 10;
/**
* RegExp BSON Type
*
* @classconstant BSON_DATA_REGEXP
**/
BSON.BSON_DATA_REGEXP = 11;
/**
* Code BSON Type
*
* @classconstant BSON_DATA_CODE
**/
BSON.BSON_DATA_CODE = 13;
/**
* Symbol BSON Type
*
* @classconstant BSON_DATA_SYMBOL
**/
BSON.BSON_DATA_SYMBOL = 14;
/**
* Code with Scope BSON Type
*
* @classconstant BSON_DATA_CODE_W_SCOPE
**/
BSON.BSON_DATA_CODE_W_SCOPE = 15;
/**
* 32 bit Integer BSON Type
*
* @classconstant BSON_DATA_INT
**/
BSON.BSON_DATA_INT = 16;
/**
* Timestamp BSON Type
*
* @classconstant BSON_DATA_TIMESTAMP
**/
BSON.BSON_DATA_TIMESTAMP = 17;
/**
* Long BSON Type
*
* @classconstant BSON_DATA_LONG
**/
BSON.BSON_DATA_LONG = 18;
/**
* MinKey BSON Type
*
* @classconstant BSON_DATA_MIN_KEY
**/
BSON.BSON_DATA_MIN_KEY = 0xff;
/**
* MaxKey BSON Type
*
* @classconstant BSON_DATA_MAX_KEY
**/
BSON.BSON_DATA_MAX_KEY = 0x7f;
/**
* Binary Default Type
*
* @classconstant BSON_BINARY_SUBTYPE_DEFAULT
**/
BSON.BSON_BINARY_SUBTYPE_DEFAULT = 0;
/**
* Binary Function Type
*
* @classconstant BSON_BINARY_SUBTYPE_FUNCTION
**/
BSON.BSON_BINARY_SUBTYPE_FUNCTION = 1;
/**
* Binary Byte Array Type
*
* @classconstant BSON_BINARY_SUBTYPE_BYTE_ARRAY
**/
BSON.BSON_BINARY_SUBTYPE_BYTE_ARRAY = 2;
/**
* Binary UUID Type
*
* @classconstant BSON_BINARY_SUBTYPE_UUID
**/
BSON.BSON_BINARY_SUBTYPE_UUID = 3;
/**
* Binary MD5 Type
*
* @classconstant BSON_BINARY_SUBTYPE_MD5
**/
BSON.BSON_BINARY_SUBTYPE_MD5 = 4;
/**
* Binary User Defined Type
*
* @classconstant BSON_BINARY_SUBTYPE_USER_DEFINED
**/
BSON.BSON_BINARY_SUBTYPE_USER_DEFINED = 128;
// Return BSON
exports.BSON = BSON;
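The removed file above serialized every document into a single shared 16 MB scratch buffer and copied the finished bytes out at the end, which capped document size at the buffer size. A standalone sketch of that pattern (names hypothetical):

```javascript
// One shared scratch buffer, reused across serialize calls.
var scratch = new Buffer(1024 * 1024 * 16);

function serializeDoc(writeInto) {
  var end = writeInto(scratch, 0);   // writer returns the end index
  var out = new Buffer(end);
  scratch.copy(out, 0, 0, end);      // copy only the bytes actually written
  return out;
}

// A writer emitting the smallest valid BSON document: a 4-byte
// little-endian size prefix followed by the terminating 0x00 byte.
var bytes = serializeDoc(function(buf, index) {
  var size = 5;
  buf[index++] = size & 0xff;
  buf[index++] = (size >> 8) & 0xff;
  buf[index++] = (size >> 16) & 0xff;
  buf[index++] = (size >> 24) & 0xff;
  buf[index++] = 0x00; // EOO
  return index;
});
console.log(bytes); // <Buffer 05 00 00 00 00>
```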


@@ -1,18 +1,19 @@
try {
exports.BSONPure = require('./bson');
exports.BSONNative = require('bson-ext');
exports.BSONNative = require('./bson');
} catch(err) {
// do nothing
}
[ './binary_parser'
, './binary'
, './code'
, './map'
, './db_ref'
, './double'
, './max_key'
, './min_key'
, './objectid'
, './regexp'
, './symbol'
, './timestamp'
, './long'].forEach(function (path) {
@@ -29,11 +30,13 @@ exports.pure = function() {
[ './binary_parser'
, './binary'
, './code'
, './map'
, './db_ref'
, './double'
, './max_key'
, './min_key'
, './objectid'
, './regexp'
, './symbol'
, './timestamp'
, './long'
@@ -54,11 +57,13 @@ exports.native = function() {
[ './binary_parser'
, './binary'
, './code'
, './map'
, './db_ref'
, './double'
, './max_key'
, './min_key'
, './objectid'
, './regexp'
, './symbol'
, './timestamp'
, './long'
@@ -68,10 +73,10 @@ exports.native = function() {
classes[i] = module[i];
}
});
// Catch error and return no classes found
try {
classes['BSON'] = require('bson-ext')
classes['BSON'] = require('./bson');
} catch(err) {
return exports.pure();
}
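With this change both entry points load the pure JS implementation, so the bson-ext fallback above should never trigger. A quick check, assuming the vendored module resolves as require('bson'):

```javascript
var bson = require('bson');
console.log(bson.BSONPure === bson.BSONNative); // true: both are './bson'
```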

node_modules/mongoose/node_modules/bson/lib/bson/map.js (new vendored file, 126 lines)

@@ -0,0 +1,126 @@
"use strict"
// We have an ES6 Map available, return the native instance
if(typeof global.Map !== 'undefined') {
module.exports = global.Map;
module.exports.Map = global.Map;
} else {
// We will return a polyfill
var Map = function(array) {
this._keys = [];
this._values = {};
for(var i = 0; i < array.length; i++) {
if(array[i] == null) continue; // skip null and undefined
var entry = array[i];
var key = entry[0];
var value = entry[1];
// Add the key to the list of keys in order
this._keys.push(key);
// Add the key and value to the values dictionary with a pointer
// to the location in the ordered keys list
this._values[key] = {v: value, i: this._keys.length - 1};
}
}
Map.prototype.clear = function() {
this._keys = [];
this._values = {};
}
Map.prototype.delete = function(key) {
var value = this._values[key];
if(value == null) return false;
// Delete entry
delete this._values[key];
// Remove the key from the ordered keys list
this._keys.splice(value.i, 1);
return true;
}
Map.prototype.entries = function() {
var self = this;
var index = 0;
return {
next: function() {
var key = self._keys[index++];
return {
value: key !== undefined ? [key, self._values[key].v] : undefined,
done: key !== undefined ? false : true
}
}
};
}
Map.prototype.forEach = function(callback, self) {
self = self || this;
for(var i = 0; i < this._keys.length; i++) {
var key = this._keys[i];
// Call the forEach callback
callback.call(self, this._values[key].v, key, self);
}
}
Map.prototype.get = function(key) {
return this._values[key] ? this._values[key].v : undefined;
}
Map.prototype.has = function(key) {
return this._values[key] != null;
}
Map.prototype.keys = function(key) {
var self = this;
var index = 0;
return {
next: function() {
var key = self._keys[index++];
return {
value: key !== undefined ? key : undefined,
done: key !== undefined ? false : true
}
}
};
}
Map.prototype.set = function(key, value) {
if(this._values[key]) {
this._values[key].v = value;
return this;
}
// Add the key to the list of keys in order
this._keys.push(key);
// Add the key and value to the values dictionary with a pointer
// to the location in the ordered keys list
this._values[key] = {v: value, i: this._keys.length - 1};
return this;
}
Map.prototype.values = function(key, value) {
var self = this;
var index = 0;
return {
next: function() {
var key = self._keys[index++];
return {
value: key !== undefined ? self._values[key].v : undefined,
done: key !== undefined ? false : true
}
}
};
}
// Expose the number of entries as a size property
Object.defineProperty(Map.prototype, 'size', {
enumerable:true,
get: function() { return this._keys.length; }
});
module.exports = Map;
module.exports.Map = Map;
}
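A usage sketch for the file above; the polyfill mirrors the subset of the ES6 Map API it implements (note its constructor requires an entries array):

```javascript
var Map = require('./map'); // path relative to lib/bson

var m = new Map([['first', 1], ['second', 2]]);
m.set('third', 3);
console.log(m.get('second')); // 2
console.log(m.has('nope'));   // false
console.log(m.size);          // 3
m.delete('first');

var it = m.keys();
for (var r = it.next(); !r.done; r = it.next()) {
  console.log(r.value); // 'second', then 'third': insertion order preserved
}
```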


@@ -110,7 +110,7 @@ ObjectID.prototype.generate = function(time) {
if ('number' != typeof time) {
time = parseInt(Date.now()/1000,10);
}
var time4Bytes = BinaryParser.encodeInt(time, 32, true, true);
/* for time-based ObjectID the bytes following the time will be zeroed */
var machine3Bytes = BinaryParser.encodeInt(MACHINE_ID, 24, false);
@@ -156,10 +156,15 @@ ObjectID.prototype.toJSON = function() {
* @return {boolean} the result of comparing two ObjectID's
*/
ObjectID.prototype.equals = function equals (otherID) {
if(otherID == null) return false;
var id = (otherID instanceof ObjectID || otherID.toHexString)
? otherID.id
: ObjectID.createFromHexString(otherID).id;
var id;
if(otherID != null && (otherID instanceof ObjectID || otherID.toHexString)) {
id = otherID.id;
} else if(typeof otherID == 'string' && ObjectID.isValid(otherID)) {
id = ObjectID.createFromHexString(otherID).id;
} else {
return false;
}
return this.id === id;
}
@@ -241,13 +246,12 @@ ObjectID.createFromHexString = function createFromHexString (hexString) {
ObjectID.isValid = function isValid(id) {
if(id == null) return false;
if(id != null && 'number' != typeof id && (id.length != 12 && id.length != 24)) {
return false;
} else {
// Check specifically for hex correctness
if(typeof id == 'string' && id.length == 24) return checkForHexRegExp.test(id);
if(typeof id == 'number')
return true;
if(typeof id == 'string') {
return id.length == 12 || (id.length == 24 && checkForHexRegExp.test(id));
}
return false;
};
/**
@@ -271,4 +275,4 @@ Object.defineProperty(ObjectID.prototype, "generationTime", {
*/
module.exports = ObjectID;
module.exports.ObjectID = ObjectID;
module.exports.ObjectId = ObjectID;
module.exports.ObjectId = ObjectID;
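The stricter equals/isValid semantics above, illustrated (values hypothetical):

```javascript
var ObjectID = require('./objectid'); // path relative to lib/bson

var id = new ObjectID();
console.log(id.equals(id.toHexString()));       // true: valid 24-char hex string
console.log(id.equals('not-a-valid-objectid')); // false: previously threw
console.log(ObjectID.isValid('zzzzzzzzzzzzzzzzzzzzzzzz')); // false: 24 chars but not hex
console.log(ObjectID.isValid(123));             // true: numbers are still accepted
```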


@@ -7,6 +7,7 @@ var writeIEEE754 = require('../float_parser').writeIEEE754
, Timestamp = require('../timestamp').Timestamp
, ObjectID = require('../objectid').ObjectID
, Symbol = require('../symbol').Symbol
, BSONRegExp = require('../regexp').BSONRegExp
, Code = require('../code').Code
, MinKey = require('../min_key').MinKey
, MaxKey = require('../max_key').MaxKey
@@ -18,12 +19,12 @@ var isDate = function isDate(d) {
return typeof d === 'object' && Object.prototype.toString.call(d) === '[object Date]';
}
var calculateObjectSize = function calculateObjectSize(object, serializeFunctions) {
var calculateObjectSize = function calculateObjectSize(object, serializeFunctions, ignoreUndefined) {
var totalLength = (4 + 1);
if(Array.isArray(object)) {
for(var i = 0; i < object.length; i++) {
totalLength += calculateElement(i.toString(), object[i], serializeFunctions)
totalLength += calculateElement(i.toString(), object[i], serializeFunctions, true, ignoreUndefined)
}
} else {
// If we have toBSON defined, override the current object
@@ -33,7 +34,7 @@ var calculateObjectSize = function calculateObjectSize(object, serializeFunction
// Calculate size
for(var key in object) {
totalLength += calculateElement(key, object[key], serializeFunctions)
totalLength += calculateElement(key, object[key], serializeFunctions, false, ignoreUndefined)
}
}
@@ -44,7 +45,7 @@ var calculateObjectSize = function calculateObjectSize(object, serializeFunction
* @ignore
* @api private
*/
function calculateElement(name, value, serializeFunctions) {
function calculateElement(name, value, serializeFunctions, isArray, ignoreUndefined) {
// If we have toBSON defined, override the current object
if(value && value.toBSON){
value = value.toBSON();
@@ -64,7 +65,8 @@ function calculateElement(name, value, serializeFunctions) {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (8 + 1);
}
case 'undefined':
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (1);
if(isArray || !ignoreUndefined) return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (1);
return 0;
case 'boolean':
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + (1 + 1);
case 'object':
@@ -82,7 +84,7 @@ function calculateElement(name, value, serializeFunctions) {
} else if(value instanceof Code || value['_bsontype'] == 'Code') {
// Calculate size depending on the availability of a scope
if(value.scope != null && Object.keys(value.scope).length > 0) {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + 4 + 4 + Buffer.byteLength(value.code.toString(), 'utf8') + 1 + calculateObjectSize(value.scope, serializeFunctions);
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + 4 + 4 + Buffer.byteLength(value.code.toString(), 'utf8') + 1 + calculateObjectSize(value.scope, serializeFunctions, ignoreUndefined);
} else {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + 4 + Buffer.byteLength(value.code.toString(), 'utf8') + 1;
}
@@ -107,12 +109,15 @@ function calculateElement(name, value, serializeFunctions) {
ordered_values['$db'] = value.db;
}
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + calculateObjectSize(ordered_values, serializeFunctions);
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + calculateObjectSize(ordered_values, serializeFunctions, ignoreUndefined);
} else if(value instanceof RegExp || Object.prototype.toString.call(value) === '[object RegExp]') {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + Buffer.byteLength(value.source, 'utf8') + 1
+ (value.global ? 1 : 0) + (value.ignoreCase ? 1 : 0) + (value.multiline ? 1 : 0) + 1
} else if(value instanceof BSONRegExp || value['_bsontype'] == 'BSONRegExp') {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + Buffer.byteLength(value.pattern, 'utf8') + 1
+ Buffer.byteLength(value.options, 'utf8') + 1
} else {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + calculateObjectSize(value, serializeFunctions) + 1;
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + calculateObjectSize(value, serializeFunctions, ignoreUndefined) + 1;
}
case 'function':
// WTF for 0.4.X where typeof /someregexp/ === 'function'
@@ -121,7 +126,7 @@ function calculateElement(name, value, serializeFunctions) {
+ (value.global ? 1 : 0) + (value.ignoreCase ? 1 : 0) + (value.multiline ? 1 : 0) + 1
} else {
if(serializeFunctions && value.scope != null && Object.keys(value.scope).length > 0) {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + 4 + 4 + Buffer.byteLength(value.toString(), 'utf8') + 1 + calculateObjectSize(value.scope, serializeFunctions);
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + 4 + 4 + Buffer.byteLength(value.toString(), 'utf8') + 1 + calculateObjectSize(value.scope, serializeFunctions, ignoreUndefined);
} else if(serializeFunctions) {
return (name != null ? (Buffer.byteLength(name, 'utf8') + 1) : 0) + 1 + 4 + Buffer.byteLength(value.toString(), 'utf8') + 1;
}


@@ -12,6 +12,7 @@ var writeIEEE754 = require('../float_parser').writeIEEE754,
MinKey = require('../min_key').MinKey,
MaxKey = require('../max_key').MaxKey,
DBRef = require('../db_ref').DBRef,
BSONRegExp = require('../regexp').BSONRegExp,
Binary = require('../binary').Binary;
var deserialize = function(buffer, options, isArray) {
@@ -19,13 +20,38 @@ var deserialize = function(buffer, options, isArray) {
// Read the document size
var size = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
// Ensure buffer is valid size
if(size < 5 || size != buffer.length) throw new Error("corrupt bson message");
// Ensure buffer is valid size
if(size < 5 || buffer.length < size) {
throw new Error("corrupt bson message");
}
// Illegal end value
if(buffer[size - 1] != 0) {
throw new Error("One object, sized correctly, with a spot for an EOO, but the EOO isn't 0x00");
}
// Start deserialization
return deserializeObject(buffer, options, isArray);
}
// Reads in a C style string
var readCStyleStringSpecial = function(buffer, index) {
// Get the start search index
var i = index;
// Locate the end of the c string
while(buffer[i] !== 0x00 && i < buffer.length) {
i++
}
// If we are at the end of the buffer there is a problem with the document
if(i >= buffer.length) throw new Error("Bad BSON Document: illegal CString")
// Grab utf8 encoded string
var string = buffer.toString('utf8', index, i);
// Update index position
index = i + 1;
// Return string
return {s: string, i: index};
}
var deserializeObject = function(buffer, options, isArray) {
// Options
options = options == null ? {} : options;
@@ -33,29 +59,16 @@ var deserializeObject = function(buffer, options, isArray) {
var cacheFunctions = options['cacheFunctions'] == null ? false : options['cacheFunctions'];
var cacheFunctionsCrc32 = options['cacheFunctionsCrc32'] == null ? false : options['cacheFunctionsCrc32'];
var promoteLongs = options['promoteLongs'] == null ? true : options['promoteLongs'];
var fieldsAsRaw = options['fieldsAsRaw'] == null ? {} : options['fieldsAsRaw'];
// Return BSONRegExp objects instead of native regular expressions
var bsonRegExp = typeof options['bsonRegExp'] == 'boolean' ? options['bsonRegExp'] : false;
var promoteBuffers = options['promoteBuffers'] == null ? false : options['promoteBuffers'];
// Validate that we have at least 4 bytes of buffer
if(buffer.length < 5) throw new Error("corrupt bson message < 5 bytes long");
// Set up index
var index = typeof options['index'] == 'number' ? options['index'] : 0;
// Reads in a C style string
var readCStyleString = function() {
// Get the start search index
var i = index;
// Locate the end of the c string
while(buffer[i] !== 0x00 && i < buffer.length) {
i++
}
// If we are at the end of the buffer there is a problem with the document
if(i >= buffer.length) throw new Error("Bad BSON Document: illegal CString")
// Grab utf8 encoded string
var string = buffer.toString('utf8', index, i);
// Update index position
index = i + 1;
// Return string
return string;
}
// Read the document size
var size = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
@@ -73,7 +86,9 @@ var deserializeObject = function(buffer, options, isArray) {
// If we get a zero it's the last byte, exit
if(elementType == 0) break;
// Read the name of the field
var name = readCStyleString();
var r = readCStyleStringSpecial(buffer, index);
var name = r.s;
index = r.i;
// Switch on the type
if(elementType == BSON.BSON_DATA_OID) {
@@ -122,8 +137,13 @@ var deserializeObject = function(buffer, options, isArray) {
if(subType == Binary.SUBTYPE_BYTE_ARRAY) {
binarySize = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;
}
// Slice the data
object[name] = new Binary(buffer.slice(index, index + binarySize), subType);
if(promoteBuffers) {
// assign reference to sliced Buffer object
object[name] = buffer.slice(index, index + binarySize);
} else {
// Slice the data
object[name] = new Binary(buffer.slice(index, index + binarySize), subType);
}
} else {
var _buffer = typeof Uint8Array != 'undefined' ? new Uint8Array(new ArrayBuffer(binarySize)) : new Array(binarySize);
// If we have subtype 2 skip the 4 bytes for the size
@@ -134,8 +154,13 @@ var deserializeObject = function(buffer, options, isArray) {
for(var i = 0; i < binarySize; i++) {
_buffer[i] = buffer[index + i];
}
// Create the binary object
object[name] = new Binary(_buffer, subType);
if(promoteBuffers) {
// assign reference to Buffer object
object[name] = _buffer;
} else {
// Create the binary object
object[name] = new Binary(_buffer, subType);
}
}
// Update the index
index = index + binarySize;
@@ -143,8 +168,17 @@ var deserializeObject = function(buffer, options, isArray) {
options['index'] = index;
// Decode the size of the array document
var objectSize = buffer[index] | buffer[index + 1] << 8 | buffer[index + 2] << 16 | buffer[index + 3] << 24;
var arrayOptions = options;
// All elements of array to be returned as raw bson
if(fieldsAsRaw[name]) {
arrayOptions = {};
for(var n in options) arrayOptions[n] = options[n];
arrayOptions['raw'] = true;
}
// Set the array to the object
object[name] = deserializeObject(buffer, options, true);
object[name] = deserializeObject(buffer, arrayOptions, true);
// Adjust the index
index = index + objectSize;
} else if(elementType == BSON.BSON_DATA_OBJECT) {
@@ -153,14 +187,28 @@ var deserializeObject = function(buffer, options, isArray) {
var objectSize = buffer[index] | buffer[index + 1] << 8 | buffer[index + 2] << 16 | buffer[index + 3] << 24;
// Validate if string Size is larger than the actual provided buffer
if(objectSize <= 0 || objectSize > (buffer.length - index)) throw new Error("bad embedded document length in bson");
// Set the array to the object
object[name] = deserializeObject(buffer, options, false);
// We have a raw value
if(options['raw']) {
// Set the array to the object
object[name] = buffer.slice(index, index + objectSize);
} else {
// Set the array to the object
object[name] = deserializeObject(buffer, options, false);
}
// Adjust the index
index = index + objectSize;
} else if(elementType == BSON.BSON_DATA_REGEXP) {
} else if(elementType == BSON.BSON_DATA_REGEXP && bsonRegExp == false) {
// Create the regexp
var source = readCStyleString();
var regExpOptions = readCStyleString();
var r = readCStyleStringSpecial(buffer, index);
var source = r.s;
index = r.i;
var r = readCStyleStringSpecial(buffer, index);
var regExpOptions = r.s;
index = r.i;
// For each option add the corresponding one for javascript
var optionsArray = new Array(regExpOptions.length);
@@ -180,6 +228,18 @@ var deserializeObject = function(buffer, options, isArray) {
}
object[name] = new RegExp(source, optionsArray.join(''));
} else if(elementType == BSON.BSON_DATA_REGEXP && bsonRegExp == true) {
// Create the regexp
var r = readCStyleStringSpecial(buffer, index);
var source = r.s;
index = r.i;
var r = readCStyleStringSpecial(buffer, index);
var regExpOptions = r.s;
index = r.i;
// Set the object
object[name] = new BSONRegExp(source, regExpOptions);
} else if(elementType == BSON.BSON_DATA_LONG) {
// Unpack the low and high bits
var lowBits = buffer[index++] | buffer[index++] << 8 | buffer[index++] << 16 | buffer[index++] << 24;


@@ -1,39 +1,30 @@
"use strict"
var writeIEEE754 = require('../float_parser').writeIEEE754
, readIEEE754 = require('../float_parser').readIEEE754
, Long = require('../long').Long
, Double = require('../double').Double
, Timestamp = require('../timestamp').Timestamp
, ObjectID = require('../objectid').ObjectID
, Symbol = require('../symbol').Symbol
, Code = require('../code').Code
, MinKey = require('../min_key').MinKey
, MaxKey = require('../max_key').MaxKey
, DBRef = require('../db_ref').DBRef
, Binary = require('../binary').Binary;
var writeIEEE754 = require('../float_parser').writeIEEE754,
readIEEE754 = require('../float_parser').readIEEE754,
Long = require('../long').Long,
Map = require('../map'),
Double = require('../double').Double,
Timestamp = require('../timestamp').Timestamp,
ObjectID = require('../objectid').ObjectID,
Symbol = require('../symbol').Symbol,
Code = require('../code').Code,
BSONRegExp = require('../regexp').BSONRegExp,
MinKey = require('../min_key').MinKey,
MaxKey = require('../max_key').MaxKey,
DBRef = require('../db_ref').DBRef,
Binary = require('../binary').Binary;
var regexp = /\x00/
// To ensure that 0.4 of node works correctly
var isDate = function isDate(d) {
return typeof d === 'object' && Object.prototype.toString.call(d) === '[object Date]';
}
var checkKey = function checkKey (key, dollarsAndDotsOk) {
if (!key.length) return;
// Check if we have a legal key for the object
if (!!~key.indexOf("\x00")) {
// The BSON spec doesn't allow keys with null bytes because keys are
// null-terminated.
throw Error("key " + key + " must not contain null bytes");
}
if (!dollarsAndDotsOk) {
if('$' == key[0]) {
throw Error("key " + key + " must not start with '$'");
} else if (!!~key.indexOf('.')) {
throw Error("key " + key + " must not contain '.'");
}
}
};
var isRegExp = function isRegExp(d) {
return Object.prototype.toString.call(d) === '[object RegExp]';
}
var serializeString = function(buffer, key, value, index) {
// Encode String type
@@ -43,20 +34,15 @@ var serializeString = function(buffer, key, value, index) {
// Encode the name
index = index + numberOfWrittenBytes + 1;
buffer[index - 1] = 0;
// Calculate size
var size = Buffer.byteLength(value) + 1;
// Write the size of the string to buffer
buffer[index + 3] = (size >> 24) & 0xff;
buffer[index + 2] = (size >> 16) & 0xff;
buffer[index + 1] = (size >> 8) & 0xff;
buffer[index] = size & 0xff;
// Adjust the index
index = index + 4;
// Write the string
buffer.write(value, index, 'utf8');
var size = buffer.write(value, index + 4, 'utf8');
// Write the size of the string to buffer
buffer[index + 3] = (size + 1 >> 24) & 0xff;
buffer[index + 2] = (size + 1 >> 16) & 0xff;
buffer[index + 1] = (size + 1 >> 8) & 0xff;
buffer[index] = size + 1 & 0xff;
// Update index
index = index + size - 1;
index = index + 4 + size;
// Write zero
buffer[index++] = 0;
return index;
@@ -128,7 +114,7 @@ var serializeNumber = function(buffer, key, value, index) {
index = index + 8;
}
return index;
return index;
}
var serializeUndefined = function(buffer, key, value, index) {
@@ -139,7 +125,7 @@ var serializeUndefined = function(buffer, key, value, index) {
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
  return index;
}
var serializeBoolean = function(buffer, key, value, index) {
@@ -152,7 +138,7 @@ var serializeBoolean = function(buffer, key, value, index) {
buffer[index++] = 0;
// Encode the boolean value
buffer[index++] = value ? 1 : 0;
  return index;
}
var serializeDate = function(buffer, key, value, index) {
@@ -178,7 +164,7 @@ var serializeDate = function(buffer, key, value, index) {
buffer[index++] = (highBits >> 8) & 0xff;
buffer[index++] = (highBits >> 16) & 0xff;
buffer[index++] = (highBits >> 24) & 0xff;
  return index;
}
var serializeRegExp = function(buffer, key, value, index) {
@@ -189,11 +175,11 @@ var serializeRegExp = function(buffer, key, value, index) {
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Write the regular expression string
buffer.write(value.source, index, 'utf8');
if (value.source && value.source.match(regexp) != null) {
throw Error("value " + value.source + " must not contain null bytes");
}
// Adjust the index
index = index + Buffer.byteLength(value.source);
index = index + buffer.write(value.source, index, 'utf8');
// Write zero
buffer[index++] = 0x00;
// Write the parameters
@@ -202,7 +188,26 @@ var serializeRegExp = function(buffer, key, value, index) {
if(value.multiline) buffer[index++] = 0x6d; // m
// Add ending zero
buffer[index++] = 0x00;
  return index;
}
var serializeBSONRegExp = function(buffer, key, value, index) {
// Write the type
buffer[index++] = BSON.BSON_DATA_REGEXP;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Adjust the index
index = index + buffer.write(value.pattern, index, 'utf8');
// Write zero
buffer[index++] = 0x00;
// Write the options
index = index + buffer.write(value.options, index, 'utf8');
// Add ending zero
buffer[index++] = 0x00;
return index;
}
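serializeBSONRegExp exists alongside serializeRegExp because a BSONRegExp can carry server-side options that a native JavaScript RegExp has no flag for. A hypothetical construction that only survives through this type (require path assumed for illustration):
var BSONRegExp = require('./regexp').BSONRegExp;
// 'x' (extended) and 's' (dotall) are valid BSON options with no JS RegExp flag here
var extended = new BSONRegExp('^acme\\s+corp$', 'ixs');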
var serializeMinMax = function(buffer, key, value, index) {
@@ -229,15 +234,15 @@ var serializeObjectId = function(buffer, key, value, index) {
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
  // Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
  // Write the objectId into the shared buffer
  buffer.write(value.id, index, 'binary')
  // Adjust index
  return index + 12;
}
var serializeBuffer = function(buffer, key, value, index) {
@@ -261,10 +266,10 @@ var serializeBuffer = function(buffer, key, value, index) {
value.copy(buffer, index, 0, size);
// Adjust the index
index = index + size;
  return index;
}
var serializeObject = function(buffer, key, value, index, checkKeys, depth, serializeFunctions) {
var serializeObject = function(buffer, key, value, index, checkKeys, depth, serializeFunctions, ignoreUndefined) {
// Write the type
buffer[index++] = Array.isArray(value) ? BSON.BSON_DATA_ARRAY : BSON.BSON_DATA_OBJECT;
// Number of written bytes
@@ -272,7 +277,7 @@ var serializeObject = function(buffer, key, value, index, checkKeys, depth, seri
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
var endIndex = serializeInto(buffer, value, checkKeys, index, depth + 1, serializeFunctions);
var endIndex = serializeInto(buffer, value, checkKeys, index, depth + 1, serializeFunctions, ignoreUndefined);
// Write size
var size = endIndex - index;
return endIndex;
@@ -299,7 +304,7 @@ var serializeLong = function(buffer, key, value, index) {
buffer[index++] = (highBits >> 8) & 0xff;
buffer[index++] = (highBits >> 16) & 0xff;
buffer[index++] = (highBits >> 24) & 0xff;
  return index;
}
var serializeDouble = function(buffer, key, value, index) {
@@ -314,35 +319,33 @@ var serializeDouble = function(buffer, key, value, index) {
writeIEEE754(buffer, value, index, 'little', 52, 8);
  // Adjust index
  index = index + 8;
  return index;
}
var serializeFunction = function(buffer, key, value, index, checkKeys, depth) {
buffer[index++] = BSON.BSON_DATA_CODE;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Function string
var functionString = value.toString();
// Function Size
var size = Buffer.byteLength(functionString) + 1;
// Write the size of the string to buffer
buffer[index++] = size & 0xff;
buffer[index++] = (size >> 8) & 0xff;
buffer[index++] = (size >> 16) & 0xff;
buffer[index++] = (size >> 24) & 0xff;
// Write the string
buffer.write(functionString, index, 'utf8');
// Update index
index = index + size - 1;
// Write zero
buffer[index++] = 0;
return index;
buffer[index++] = BSON.BSON_DATA_CODE;
// Number of written bytes
var numberOfWrittenBytes = buffer.write(key, index, 'utf8');
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Function string
var functionString = value.toString();
// Write the string
var size = buffer.write(functionString, index + 4, 'utf8') + 1;
// Write the size of the string to buffer
buffer[index] = size & 0xff;
buffer[index + 1] = (size >> 8) & 0xff;
buffer[index + 2] = (size >> 16) & 0xff;
buffer[index + 3] = (size >> 24) & 0xff;
// Update index
index = index + 4 + size - 1;
// Write zero
buffer[index++] = 0;
return index;
}
var serializeCode = function(buffer, key, value, index, checkKeys, depth, serializeFunctions) {
var serializeCode = function(buffer, key, value, index, checkKeys, depth, serializeFunctions, ignoreUndefined) {
if(value.scope != null && Object.keys(value.scope).length > 0) {
// Write the type
buffer[index++] = BSON.BSON_DATA_CODE_W_SCOPE;
@@ -358,16 +361,15 @@ var serializeCode = function(buffer, key, value, index, checkKeys, depth, serial
// Serialize the function
// Get the function string
var functionString = typeof value.code == 'string' ? value.code : value.code.toString();
var codeSize = Buffer.byteLength(functionString) + 1;
// Index adjustment
index = index + 4;
// Write string into buffer
var codeSize = buffer.write(functionString, index + 4, 'utf8') + 1;
// Write the size of the string to buffer
buffer[index] = codeSize & 0xff;
buffer[index + 1] = (codeSize >> 8) & 0xff;
buffer[index + 2] = (codeSize >> 16) & 0xff;
buffer[index + 3] = (codeSize >> 24) & 0xff;
// Write string into buffer
buffer.write(functionString, index + 4, 'utf8');
// Write end 0
buffer[index + 4 + codeSize - 1] = 0;
// Write the
@@ -375,7 +377,7 @@ var serializeCode = function(buffer, key, value, index, checkKeys, depth, serial
//
// Serialize the scope value
var endIndex = serializeInto(buffer, value.scope, checkKeys, index, depth + 1, serializeFunctions)
var endIndex = serializeInto(buffer, value.scope, checkKeys, index, depth + 1, serializeFunctions, ignoreUndefined)
index = endIndex - 1;
// Write the total
@@ -397,22 +399,20 @@ var serializeCode = function(buffer, key, value, index, checkKeys, depth, serial
buffer[index++] = 0;
// Function string
var functionString = value.code.toString();
// Function Size
var size = Buffer.byteLength(functionString) + 1;
// Write the size of the string to buffer
buffer[index++] = size & 0xff;
buffer[index++] = (size >> 8) & 0xff;
buffer[index++] = (size >> 16) & 0xff;
buffer[index++] = (size >> 24) & 0xff;
// Write the string
buffer.write(functionString, index, 'utf8');
var size = buffer.write(functionString, index + 4, 'utf8') + 1;
// Write the size of the string to buffer
buffer[index] = size & 0xff;
buffer[index + 1] = (size >> 8) & 0xff;
buffer[index + 2] = (size >> 16) & 0xff;
buffer[index + 3] = (size >> 24) & 0xff;
// Update index
index = index + size - 1;
index = index + 4 + size - 1;
// Write zero
buffer[index++] = 0;
}
  return index;
}
var serializeBinary = function(buffer, key, value, index) {
@@ -447,7 +447,7 @@ var serializeBinary = function(buffer, key, value, index) {
data.copy(buffer, index, 0, value.position);
// Adjust the index
index = index + value.position;
  return index;
}
var serializeSymbol = function(buffer, key, value, index) {
@@ -458,20 +458,18 @@ var serializeSymbol = function(buffer, key, value, index) {
// Encode the name
index = index + numberOfWrittenBytes;
buffer[index++] = 0;
// Calculate size
var size = Buffer.byteLength(value.value) + 1;
// Write the size of the string to buffer
buffer[index++] = size & 0xff;
buffer[index++] = (size >> 8) & 0xff;
buffer[index++] = (size >> 16) & 0xff;
buffer[index++] = (size >> 24) & 0xff;
// Write the string
buffer.write(value.value, index, 'utf8');
var size = buffer.write(value.value, index + 4, 'utf8') + 1;
// Write the size of the string to buffer
buffer[index] = size & 0xff;
buffer[index + 1] = (size >> 8) & 0xff;
buffer[index + 2] = (size >> 16) & 0xff;
buffer[index + 3] = (size >> 24) & 0xff;
// Update index
index = index + size - 1;
index = index + 4 + size - 1;
// Write zero
buffer[index++] = 0x00;
  return index;
}
var serializeDBRef = function(buffer, key, value, index, depth, serializeFunctions) {
@@ -489,13 +487,13 @@ var serializeDBRef = function(buffer, key, value, index, depth, serializeFunctio
// Serialize object
if(null != value.db) {
    endIndex = serializeInto(buffer, {
'$ref': value.namespace
, '$id' : value.oid
, '$db' : value.db
}, false, index, depth + 1, serializeFunctions);
} else {
    endIndex = serializeInto(buffer, {
'$ref': value.namespace
, '$id' : value.oid
}, false, index, depth + 1, serializeFunctions);
@@ -512,132 +510,226 @@ var serializeDBRef = function(buffer, key, value, index, depth, serializeFunctio
return endIndex;
}
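The two serializeInto calls above show that a DBRef goes onto the wire as an ordinary embedded document. Conceptually (hypothetical illustration, require path assumed):
var ObjectID = require('./objectid').ObjectID;
var oid = new ObjectID();
// What serializeDBRef emits for new DBRef('users', oid, 'accounts'):
var wireEquivalent = { '$ref': 'users', '$id': oid, '$db': 'accounts' }; // '$db' only when a db was supplied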
var serializeInto = function serializeInto(buffer, object, checkKeys, startingIndex, depth, serializeFunctions) {
startingIndex = startingIndex || 0;
var serializeInto = function serializeInto(buffer, object, checkKeys, startingIndex, depth, serializeFunctions, ignoreUndefined) {
startingIndex = startingIndex || 0;
  // Start place to serialize into
  var index = startingIndex + 4;
  var self = this;
  // Special case isArray
  if(Array.isArray(object)) {
    // Get object keys
    for(var i = 0; i < object.length; i++) {
      var key = "" + i;
      var value = object[i];
      // Is there an override value
      if(value && value.toBSON) {
        if(typeof value.toBSON != 'function') throw new Error("toBSON is not a function");
        value = value.toBSON();
      }
      var type = typeof value;
if(type == 'string') {
index = serializeString(buffer, key, value, index);
} else if(type == 'number') {
index = serializeNumber(buffer, key, value, index);
} else if(type == 'boolean') {
index = serializeBoolean(buffer, key, value, index);
} else if(value instanceof Date || isDate(value)) {
index = serializeDate(buffer, key, value, index);
} else if(type == 'undefined' || value == null) {
index = serializeUndefined(buffer, key, value, index);
} else if(value['_bsontype'] == 'ObjectID') {
index = serializeObjectId(buffer, key, value, index);
} else if(Buffer.isBuffer(value)) {
index = serializeBuffer(buffer, key, value, index);
} else if(value instanceof RegExp || isRegExp(value)) {
index = serializeRegExp(buffer, key, value, index);
} else if(type == 'object' && value['_bsontype'] == null) {
index = serializeObject(buffer, key, value, index, checkKeys, depth, serializeFunctions, ignoreUndefined);
} else if(value['_bsontype'] == 'Long' || value['_bsontype'] == 'Timestamp') {
index = serializeLong(buffer, key, value, index);
} else if(value['_bsontype'] == 'Double') {
index = serializeDouble(buffer, key, value, index);
} else if(typeof value == 'function' && serializeFunctions) {
index = serializeFunction(buffer, key, value, index, checkKeys, depth, serializeFunctions);
} else if(value['_bsontype'] == 'Code') {
index = serializeCode(buffer, key, value, index, checkKeys, depth, serializeFunctions, ignoreUndefined);
} else if(value['_bsontype'] == 'Binary') {
index = serializeBinary(buffer, key, value, index);
} else if(value['_bsontype'] == 'Symbol') {
index = serializeSymbol(buffer, key, value, index);
} else if(value['_bsontype'] == 'DBRef') {
index = serializeDBRef(buffer, key, value, index, depth, serializeFunctions);
} else if(value['_bsontype'] == 'BSONRegExp') {
index = serializeBSONRegExp(buffer, key, value, index);
} else if(value['_bsontype'] == 'MinKey' || value['_bsontype'] == 'MaxKey') {
index = serializeMinMax(buffer, key, value, index);
}
}
} else if(object instanceof Map) {
var iterator = object.entries();
var done = false;
if(type == 'string') {
index = serializeString(buffer, key, value, index);
} else if(type == 'number') {
index = serializeNumber(buffer, key, value, index);
} else if(type == 'undefined' || value == null) {
index = serializeUndefined(buffer, key, value, index);
} else if(type == 'boolean') {
index = serializeBoolean(buffer, key, value, index);
} else if(value instanceof Date || isDate(value)) {
index = serializeDate(buffer, key, value, index);
} else if(value instanceof RegExp || Object.prototype.toString.call(value) === '[object RegExp]') {
index = serializeRegExp(buffer, key, value, index);
} else if(value['_bsontype'] == 'MinKey' || value['_bsontype'] == 'MaxKey') {
index = serializeMinMax(buffer, key, value, index);
} else if(value['_bsontype'] == 'ObjectID') {
index = serializeObjectId(buffer, key, value, index);
} else if(Buffer.isBuffer(value)) {
index = serializeBuffer(buffer, key, value, index);
} else if(type == 'object' && value['_bsontype'] == null) {
index = serializeObject(buffer, key, value, index, checkKeys, depth, serializeFunctions);
} else if(value['_bsontype'] == 'Long' || value['_bsontype'] == 'Timestamp') {
index = serializeLong(buffer, key, value, index);
} else if(value['_bsontype'] == 'Double') {
index = serializeDouble(buffer, key, value, index);
} else if(typeof value == 'function' && serializeFunctions) {
index = serializeFunction(buffer, key, value, index, checkKeys, depth, serializeFunctions);
} else if(value['_bsontype'] == 'Code') {
index = serializeCode(buffer, key, value, index, checkKeys, depth, serializeFunctions);
} else if(value['_bsontype'] == 'Binary') {
index = serializeBinary(buffer, key, value, index);
} else if(value['_bsontype'] == 'Symbol') {
index = serializeSymbol(buffer, key, value, index);
} else if(value['_bsontype'] == 'DBRef') {
index = serializeDBRef(buffer, key, value, index, depth, serializeFunctions);
}
}
} else {
// Did we provide a custom serialization method
if(object.toBSON) {
if(typeof object.toBSON != 'function') throw new Error("toBSON is not a function");
object = object.toBSON();
if(object != null && typeof object != 'object') throw new Error("toBSON function did not return an object");
}
while(!done) {
// Unpack the next entry
var entry = iterator.next();
done = entry.done;
// Are we done, then skip and terminate
if(done) continue;
// Iterate over all the keys
for(var key in object) {
var value = object[key];
// Is there an override value
if(value && value.toBSON) {
if(typeof value.toBSON != 'function') throw new Error("toBSON is not a function");
value = value.toBSON();
}
// Get the entry values
var key = entry.value[0];
var value = entry.value[1];
      // Check the type of the value
      var type = typeof value;
// Check the key and throw error if it's illegal
if(key != '$db' && key != '$ref' && key != '$id') {
checkKey(key, !checkKeys);
}
// Check the key and throw error if it's illegal
if(key != '$db' && key != '$ref' && key != '$id') {
if (key.match(regexp) != null) {
// The BSON spec doesn't allow keys with null bytes because keys are
// null-terminated.
throw Error("key " + key + " must not contain null bytes");
}
if (checkKeys) {
if('$' == key[0]) {
throw Error("key " + key + " must not start with '$'");
} else if (!!~key.indexOf('.')) {
throw Error("key " + key + " must not contain '.'");
}
}
}
// console.log("---------------------------------------------------")
// console.dir("key = " + key)
// console.dir("value = " + value)
if(type == 'string') {
index = serializeString(buffer, key, value, index);
} else if(type == 'number') {
index = serializeNumber(buffer, key, value, index);
} else if(type == 'undefined' || value == null) {
index = serializeUndefined(buffer, key, value, index);
} else if(type == 'boolean') {
index = serializeBoolean(buffer, key, value, index);
} else if(value instanceof Date || isDate(value)) {
index = serializeDate(buffer, key, value, index);
} else if(value instanceof RegExp || Object.prototype.toString.call(value) === '[object RegExp]') {
index = serializeRegExp(buffer, key, value, index);
} else if(value['_bsontype'] == 'MinKey' || value['_bsontype'] == 'MaxKey') {
index = serializeMinMax(buffer, key, value, index);
} else if(value['_bsontype'] == 'ObjectID') {
index = serializeObjectId(buffer, key, value, index);
} else if(Buffer.isBuffer(value)) {
index = serializeBuffer(buffer, key, value, index);
} else if(type == 'object' && value['_bsontype'] == null) {
index = serializeObject(buffer, key, value, index, checkKeys, depth, serializeFunctions);
} else if(value['_bsontype'] == 'Long' || value['_bsontype'] == 'Timestamp') {
index = serializeLong(buffer, key, value, index);
} else if(value['_bsontype'] == 'Double') {
index = serializeDouble(buffer, key, value, index);
} else if(value['_bsontype'] == 'Code') {
index = serializeCode(buffer, key, value, index, checkKeys, depth, serializeFunctions);
} else if(typeof value == 'function' && serializeFunctions) {
index = serializeFunction(buffer, key, value, index, checkKeys, depth, serializeFunctions);
} else if(value['_bsontype'] == 'Binary') {
index = serializeBinary(buffer, key, value, index);
} else if(value['_bsontype'] == 'Symbol') {
index = serializeSymbol(buffer, key, value, index);
} else if(value['_bsontype'] == 'DBRef') {
index = serializeDBRef(buffer, key, value, index, depth, serializeFunctions);
}
}
}
index = serializeString(buffer, key, value, index);
} else if(type == 'number') {
index = serializeNumber(buffer, key, value, index);
} else if(type == 'boolean') {
index = serializeBoolean(buffer, key, value, index);
} else if(value instanceof Date || isDate(value)) {
index = serializeDate(buffer, key, value, index);
} else if(value === undefined && ignoreUndefined == true) {
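        // Intentionally empty: skip this key entirely when ignoreUndefined is set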
} else if(value === null || value === undefined) {
index = serializeUndefined(buffer, key, value, index);
} else if(value['_bsontype'] == 'ObjectID') {
index = serializeObjectId(buffer, key, value, index);
} else if(Buffer.isBuffer(value)) {
index = serializeBuffer(buffer, key, value, index);
} else if(value instanceof RegExp || isRegExp(value)) {
index = serializeRegExp(buffer, key, value, index);
} else if(type == 'object' && value['_bsontype'] == null) {
index = serializeObject(buffer, key, value, index, checkKeys, depth, serializeFunctions, ignoreUndefined);
} else if(value['_bsontype'] == 'Long' || value['_bsontype'] == 'Timestamp') {
index = serializeLong(buffer, key, value, index);
} else if(value['_bsontype'] == 'Double') {
index = serializeDouble(buffer, key, value, index);
} else if(value['_bsontype'] == 'Code') {
index = serializeCode(buffer, key, value, index, checkKeys, depth, serializeFunctions, ignoreUndefined);
} else if(typeof value == 'function' && serializeFunctions) {
index = serializeFunction(buffer, key, value, index, checkKeys, depth, serializeFunctions);
} else if(value['_bsontype'] == 'Binary') {
index = serializeBinary(buffer, key, value, index);
} else if(value['_bsontype'] == 'Symbol') {
index = serializeSymbol(buffer, key, value, index);
} else if(value['_bsontype'] == 'DBRef') {
index = serializeDBRef(buffer, key, value, index, depth, serializeFunctions);
} else if(value['_bsontype'] == 'BSONRegExp') {
index = serializeBSONRegExp(buffer, key, value, index);
} else if(value['_bsontype'] == 'MinKey' || value['_bsontype'] == 'MaxKey') {
index = serializeMinMax(buffer, key, value, index);
}
}
} else {
// Did we provide a custom serialization method
if(object.toBSON) {
if(typeof object.toBSON != 'function') throw new Error("toBSON is not a function");
object = object.toBSON();
if(object != null && typeof object != 'object') throw new Error("toBSON function did not return an object");
}
// Final padding byte for object
buffer[index++] = 0x00;
// Iterate over all the keys
for(var key in object) {
var value = object[key];
// Is there an override value
if(value && value.toBSON) {
if(typeof value.toBSON != 'function') throw new Error("toBSON is not a function");
value = value.toBSON();
}
// Final size
var size = index - startingIndex;
// Check the type of the value
var type = typeof value;
// Check the key and throw error if it's illegal
if(key != '$db' && key != '$ref' && key != '$id') {
if (key.match(regexp) != null) {
// The BSON spec doesn't allow keys with null bytes because keys are
// null-terminated.
throw Error("key " + key + " must not contain null bytes");
}
if (checkKeys) {
if('$' == key[0]) {
throw Error("key " + key + " must not start with '$'");
} else if (!!~key.indexOf('.')) {
throw Error("key " + key + " must not contain '.'");
}
}
}
if(type == 'string') {
index = serializeString(buffer, key, value, index);
} else if(type == 'number') {
index = serializeNumber(buffer, key, value, index);
} else if(type == 'boolean') {
index = serializeBoolean(buffer, key, value, index);
} else if(value instanceof Date || isDate(value)) {
index = serializeDate(buffer, key, value, index);
} else if(value === undefined && ignoreUndefined == true) {
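        // Intentionally empty: skip this key entirely when ignoreUndefined is set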
} else if(value === null || value === undefined) {
index = serializeUndefined(buffer, key, value, index);
} else if(value['_bsontype'] == 'ObjectID') {
index = serializeObjectId(buffer, key, value, index);
} else if(Buffer.isBuffer(value)) {
index = serializeBuffer(buffer, key, value, index);
} else if(value instanceof RegExp || isRegExp(value)) {
index = serializeRegExp(buffer, key, value, index);
} else if(type == 'object' && value['_bsontype'] == null) {
index = serializeObject(buffer, key, value, index, checkKeys, depth, serializeFunctions, ignoreUndefined);
} else if(value['_bsontype'] == 'Long' || value['_bsontype'] == 'Timestamp') {
index = serializeLong(buffer, key, value, index);
} else if(value['_bsontype'] == 'Double') {
index = serializeDouble(buffer, key, value, index);
} else if(value['_bsontype'] == 'Code') {
index = serializeCode(buffer, key, value, index, checkKeys, depth, serializeFunctions, ignoreUndefined);
} else if(typeof value == 'function' && serializeFunctions) {
index = serializeFunction(buffer, key, value, index, checkKeys, depth, serializeFunctions);
} else if(value['_bsontype'] == 'Binary') {
index = serializeBinary(buffer, key, value, index);
} else if(value['_bsontype'] == 'Symbol') {
index = serializeSymbol(buffer, key, value, index);
} else if(value['_bsontype'] == 'DBRef') {
index = serializeDBRef(buffer, key, value, index, depth, serializeFunctions);
} else if(value['_bsontype'] == 'BSONRegExp') {
index = serializeBSONRegExp(buffer, key, value, index);
} else if(value['_bsontype'] == 'MinKey' || value['_bsontype'] == 'MaxKey') {
index = serializeMinMax(buffer, key, value, index);
}
}
}
// Final padding byte for object
buffer[index++] = 0x00;
// Final size
var size = index - startingIndex;
// Write the size of the object
buffer[startingIndex++] = size & 0xff;
buffer[startingIndex++] = (size >> 8) & 0xff;

View File

@@ -0,0 +1,30 @@
/**
* A class representation of the BSON RegExp type.
*
* @class
* @return {BSONRegExp} A BSONRegExp instance
*/
function BSONRegExp(pattern, options) {
  if(!(this instanceof BSONRegExp)) return new BSONRegExp(pattern, options);
  // Store the type tag plus the raw pattern and options
this._bsontype = 'BSONRegExp';
this.pattern = pattern;
this.options = options;
// Validate options
for(var i = 0; i < options.length; i++) {
if(!(this.options[i] == 'i'
|| this.options[i] == 'm'
|| this.options[i] == 'x'
|| this.options[i] == 'l'
|| this.options[i] == 's'
|| this.options[i] == 'u'
)) {
      throw new Error('the regular expression option [' + this.options[i] + '] is not supported');
}
}
}
module.exports = BSONRegExp;
module.exports.BSONRegExp = BSONRegExp;
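A short usage sketch of the validation above (hypothetical require path):
var BSONRegExp = require('./regexp').BSONRegExp;
new BSONRegExp('^foo', 'im'); // fine
new BSONRegExp('^foo', 'g'); // throws: 'g' is not a supported BSON regex option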

View File

@@ -1,38 +0,0 @@
.travis.yml
appveyor.yml
test.bat
build/
test/
.vagrant/
# Users Environment Variables
.lock-wscript
.DS_Store
*.swp
*.seed
.project
.settings
./data
node_modules/
# Logs
logs
*.log
# Runtime data
pids
*.pid
*.seed
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release

View File

@@ -1,43 +0,0 @@
0.1.10 2015-06-17
-----------------
- No longer print to console.error if the driver did not load correctly.
0.1.9 2015-06-17
----------------
- Removed deprecated Node C++ method for Nan.h based ones.
0.1.8 2015-06-12
----------------
- Refactored to use single 16MB buffer for all serialization.
0.1.7 2015-05-15
----------------
- Attempt node-pre-gyp otherwise fallback to node-gyp or just fail.
0.1.6 2015-05-07
----------------
- Updated to use bundled node-pre-gyp as install method.
0.1.5 2015-05-07
----------------
- Updated npmignore to remove any non-needed artifacts.
0.1.4 2015-05-05
----------------
- Updated nan.h dependency to 1.8.x.
0.1.3 2015-04-23
----------------
- Windows-only prebuilt support (Issue #6, https://github.com/imlucas)
0.1.2 2015-04-20
----------------
- Removed pre-packaged binaries from build.
0.1.1 2015-04-20
----------------
- Merged node-pre-gyp support (Issue #1, https://github.com/imlucas)
0.1.0 2015-03-26
----------------
- First push to npm; cleaned up the project and left only the C++ source and test harnesses.

View File

@@ -1,16 +0,0 @@
NODE = node
NPM = npm
NODEUNIT = node_modules/nodeunit/bin/nodeunit
all: clean node_gyp
test: clean node_gyp
npm test
node_gyp: clean
node-gyp configure build
clean:
node-gyp clean
.PHONY: all

View File

@@ -1,37 +0,0 @@
# bson-ext
[![linux build status](https://secure.travis-ci.org/imlucas/bson-ext.png)](http://travis-ci.org/imlucas/bson-ext)
[![windows build status](https://ci.appveyor.com/api/projects/status/github/imlucas/bson-ext)](https://ci.appveyor.com/project/imlucas/bson-ext)
This module contains the BSON [native addon](https://nodejs.org/api/addons.html)
only and is not meant to be used in isolation from the [bson](http://npm.im/bson)
NPM module. It lives in its own module so it can be an optional
dependency for the [bson](http://npm.im/bson) module.
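The [bson](http://npm.im/bson) module typically wires this up with a guarded
`require`, so a failed native build degrades to the pure-JS parser. A minimal
sketch of the pattern (illustrative, not this package's literal code):
```
var bson;
try {
  // Prefer the compiled C++ parser when the addon built successfully
  bson = require('bson-ext');
} catch (err) {
  // Fall back to the pure JavaScript implementation
  console.error('js-bson: Failed to load c++ bson extension, using pure JS version');
  bson = require('./lib/bson'); // hypothetical fallback path
}
```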
## Testing
```
npm test
```
## Prebuilt Binaries
Have you ever seen this message in your console?
```
js-bson: Failed to load c++ bson extension, using pure JS version
```
We are experimenting with [node-pre-gyp](http://npm.im/node-pre-gyp) to publish
and install prebuilt binaries. This means you don't need the full toolchain installed
and configured correctly to use this module, and you'll never have to see this
message again. Currently, prebuilt binaries will only be used for Windows,
as it is the most problematic platform for this issue. This will also allow us
more time to evaluate the costs and benefits of prebuilt support on OSX and Linux.
If you are interested in prebuilt binary support on OSX or Linux, please
[join the discussion on this issue](https://github.com/christkv/bson-ext/issues/6)!
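For reference, node-pre-gyp is driven by a `binary` stanza in `package.json`;
a minimal hypothetical example of the shape involved (values are illustrative,
not this project's actual configuration):
```
"binary": {
  "module_name": "bson",
  "module_path": "./build/Release",
  "host": "https://example.com/prebuilt-binaries"
}
```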
## License
Apache 2

View File

@@ -1,30 +0,0 @@
{
'targets': [
{
'win_delay_load_hook': 'true',
'target_name': 'bson',
'sources': [ 'ext/bson.cc' ],
'cflags!': [ '-fno-exceptions' ],
'cflags_cc!': [ '-fno-exceptions' ],
'include_dirs': [ '<!(node -e "require(\'nan\')")' ],
'conditions': [
['OS=="mac"', {
'xcode_settings': {
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES'
}
}],
['OS=="win"', {
'configurations': {
'Release': {
'msvs_settings': {
'VCCLCompilerTool': {
'ExceptionHandling': 1
}
}
}
}
}]
]
}
]
}

View File

@@ -1,332 +0,0 @@
# We borrow heavily from the kernel build setup, though we are simpler since
# we don't have Kconfig tweaking settings on us.
# The implicit make rules have it looking for RCS files, among other things.
# We instead explicitly write all the rules we care about.
# It's even quicker (saves ~200ms) to pass -r on the command line.
MAKEFLAGS=-r
# The source directory tree.
srcdir := ..
abs_srcdir := $(abspath $(srcdir))
# The name of the builddir.
builddir_name ?= .
# The V=1 flag on command line makes us verbosely print command lines.
ifdef V
quiet=
else
quiet=quiet_
endif
# Specify BUILDTYPE=Release on the command line for a release build.
BUILDTYPE ?= Release
# Directory all our build output goes into.
# Note that this must be two directories beneath src/ for unit tests to pass,
# as they reach into the src/ directory for data with relative paths.
builddir ?= $(builddir_name)/$(BUILDTYPE)
abs_builddir := $(abspath $(builddir))
depsdir := $(builddir)/.deps
# Object output directory.
obj := $(builddir)/obj
abs_obj := $(abspath $(obj))
# We build up a list of every single one of the targets so we can slurp in the
# generated dependency rule Makefiles in one pass.
all_deps :=
CC.target ?= $(CC)
CFLAGS.target ?= $(CFLAGS)
CXX.target ?= $(CXX)
CXXFLAGS.target ?= $(CXXFLAGS)
LINK.target ?= $(LINK)
LDFLAGS.target ?= $(LDFLAGS)
AR.target ?= $(AR)
# C++ apps need to be linked with g++.
#
# Note: flock is used to serialize linking. Linking is a memory-intensive
# process so running parallel links can often lead to thrashing. To disable
# the serialization, override LINK via an environment variable as follows:
#
# export LINK=g++
#
# This will allow make to invoke N linker processes as specified in -jN.
LINK ?= flock $(builddir)/linker.lock $(CXX.target)
# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
# to replicate this environment fallback in make as well.
CC.host ?= gcc
CFLAGS.host ?=
CXX.host ?= g++
CXXFLAGS.host ?=
LINK.host ?= $(CXX.host)
LDFLAGS.host ?=
AR.host ?= ar
# Define a dir function that can handle spaces.
# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
# "leading spaces cannot appear in the text of the first argument as written.
# These characters can be put into the argument value by variable substitution."
empty :=
space := $(empty) $(empty)
# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
replace_spaces = $(subst $(space),?,$1)
unreplace_spaces = $(subst ?,$(space),$1)
dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
# Flags to make gcc output dependency info. Note that you need to be
# careful here to use the flags that ccache and distcc can understand.
# We write to a dep file on the side first and then rename at the end
# so we can't end up with a broken dep file.
depfile = $(depsdir)/$(call replace_spaces,$@).d
DEPFLAGS = -MMD -MF $(depfile).raw
# We have to fixup the deps output in a few ways.
# (1) the file output should mention the proper .o file.
# ccache or distcc lose the path to the target, so we convert a rule of
# the form:
# foobar.o: DEP1 DEP2
# into
# path/to/foobar.o: DEP1 DEP2
# (2) we want missing files not to cause us to fail to build.
# We want to rewrite
# foobar.o: DEP1 DEP2 \
# DEP3
# to
# DEP1:
# DEP2:
# DEP3:
# so if the files are missing, they're just considered phony rules.
# We have to do some pretty insane escaping to get those backslashes
# and dollar signs past make, the shell, and sed at the same time.
# Doesn't work with spaces, but that's fine: .d files have spaces in
# their names replaced with other characters.
define fixup_dep
# The depfile may not exist if the input file didn't have any #includes.
touch $(depfile).raw
# Fixup path as in (1).
sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
# Add extra rules as in (2).
# We remove slashes and replace spaces with new lines;
# remove blank lines;
# delete the first line and append a colon to the remaining lines.
sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
grep -v '^$$' |\
sed -e 1d -e 's|$$|:|' \
>> $(depfile)
rm $(depfile).raw
endef
# Command definitions:
# - cmd_foo is the actual command to run;
# - quiet_cmd_foo is the brief-output summary of the command.
quiet_cmd_cc = CC($(TOOLSET)) $@
cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
quiet_cmd_cxx = CXX($(TOOLSET)) $@
cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
quiet_cmd_touch = TOUCH $@
cmd_touch = touch $@
quiet_cmd_copy = COPY $@
# send stderr to /dev/null to ignore messages when linking directories.
cmd_copy = rm -rf "$@" && cp -af "$<" "$@"
quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
quiet_cmd_alink_thin = AR($(TOOLSET)) $@
cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
# Due to circular dependencies between libraries :(, we wrap the
# special "figure out circular dependencies" flags around the entire
# input list during linking.
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
# We support two kinds of shared objects (.so):
# 1) shared_library, which is just bundling together many dependent libraries
# into a link line.
# 2) loadable_module, which is generating a module intended for dlopen().
#
# They differ only slightly:
# In the former case, we want to package all dependent code into the .so.
# In the latter case, we want to package just the API exposed by the
# outermost module.
# This means shared_library uses --whole-archive, while loadable_module doesn't.
# (Note that --whole-archive is incompatible with the --start-group used in
# normal linking.)
# Other shared-object link notes:
# - Set SONAME to the library filename so our binaries don't reference
# the local, absolute paths used on the link command-line.
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
# Define an escape_quotes function to escape single quotes.
# This allows us to handle quotes properly as long as we always use
# use single quotes and escape_quotes.
escape_quotes = $(subst ','\'',$(1))
# This comment is here just to include a ' to unconfuse syntax highlighting.
# Define an escape_vars function to escape '$' variable syntax.
# This allows us to read/write command lines with shell variables (e.g.
# $LD_LIBRARY_PATH), without triggering make substitution.
escape_vars = $(subst $$,$$$$,$(1))
# Helper that expands to a shell command to echo a string exactly as it is in
# make. This uses printf instead of echo because printf's behaviour with respect
# to escape sequences is more portable than echo's across different shells
# (e.g., dash, bash).
exact_echo = printf '%s\n' '$(call escape_quotes,$(1))'
# Helper to compare the command we're about to run against the command
# we logged the last time we ran the command. Produces an empty
# string (false) when the commands match.
# Tricky point: Make has no string-equality test function.
# The kernel uses the following, but it seems like it would have false
# positives, where one string reordered its arguments.
# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \
# $(filter-out $(cmd_$@), $(cmd_$(1))))
# We instead substitute each for the empty string into the other, and
# say they're equal if both substitutions produce the empty string.
# .d files contain ? instead of spaces, take that into account.
command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\
$(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
# Helper that is non-empty when a prerequisite changes.
# Normally make does this implicitly, but we force rules to always run
# so we can check their command lines.
# $? -- new prerequisites
# $| -- order-only dependencies
prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
# Helper that executes all postbuilds until one fails.
define do_postbuilds
@E=0;\
for p in $(POSTBUILDS); do\
eval $$p;\
E=$$?;\
if [ $$E -ne 0 ]; then\
break;\
fi;\
done;\
if [ $$E -ne 0 ]; then\
rm -rf "$@";\
exit $$E;\
fi
endef
# do_cmd: run a command via the above cmd_foo names, if necessary.
# Should always run for a given target to handle command-line changes.
# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
# Third argument, if non-zero, makes it do POSTBUILDS processing.
# Note: We intentionally do NOT call dirx for depfile, since it contains ? for
# spaces already and dirx strips the ? characters.
define do_cmd
$(if $(or $(command_changed),$(prereq_changed)),
@$(call exact_echo, $($(quiet)cmd_$(1)))
@mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
$(if $(findstring flock,$(word 1,$(cmd_$1))),
@$(cmd_$(1))
@echo " $(quiet_cmd_$(1)): Finished",
@$(cmd_$(1))
)
@$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
@$(if $(2),$(fixup_dep))
$(if $(and $(3), $(POSTBUILDS)),
$(call do_postbuilds)
)
)
endef
# Declare the "all" target first so it is the default,
# even though we don't have the deps yet.
.PHONY: all
all:
# make looks for ways to re-generate included makefiles, but in our case, we
# don't have a direct way. Explicitly telling make that it has nothing to do
# for them makes it go faster.
%.d: ;
# Use FORCE_DO_CMD to force a target to run. Should be coupled with
# do_cmd.
.PHONY: FORCE_DO_CMD
FORCE_DO_CMD:
TOOLSET := target
# Suffix rules, putting all outputs into $(obj).
$(obj).$(TOOLSET)/%.o: $(srcdir)/%.c FORCE_DO_CMD
@$(call do_cmd,cc,1)
$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
@$(call do_cmd,cxx,1)
$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cpp FORCE_DO_CMD
@$(call do_cmd,cxx,1)
$(obj).$(TOOLSET)/%.o: $(srcdir)/%.cxx FORCE_DO_CMD
@$(call do_cmd,cxx,1)
$(obj).$(TOOLSET)/%.o: $(srcdir)/%.S FORCE_DO_CMD
@$(call do_cmd,cc,1)
$(obj).$(TOOLSET)/%.o: $(srcdir)/%.s FORCE_DO_CMD
@$(call do_cmd,cc,1)
# Try building from generated source, too.
$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.c FORCE_DO_CMD
@$(call do_cmd,cc,1)
$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
@$(call do_cmd,cxx,1)
$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cpp FORCE_DO_CMD
@$(call do_cmd,cxx,1)
$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.cxx FORCE_DO_CMD
@$(call do_cmd,cxx,1)
$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.S FORCE_DO_CMD
@$(call do_cmd,cc,1)
$(obj).$(TOOLSET)/%.o: $(obj).$(TOOLSET)/%.s FORCE_DO_CMD
@$(call do_cmd,cc,1)
$(obj).$(TOOLSET)/%.o: $(obj)/%.c FORCE_DO_CMD
@$(call do_cmd,cc,1)
$(obj).$(TOOLSET)/%.o: $(obj)/%.cc FORCE_DO_CMD
@$(call do_cmd,cxx,1)
$(obj).$(TOOLSET)/%.o: $(obj)/%.cpp FORCE_DO_CMD
@$(call do_cmd,cxx,1)
$(obj).$(TOOLSET)/%.o: $(obj)/%.cxx FORCE_DO_CMD
@$(call do_cmd,cxx,1)
$(obj).$(TOOLSET)/%.o: $(obj)/%.S FORCE_DO_CMD
@$(call do_cmd,cc,1)
$(obj).$(TOOLSET)/%.o: $(obj)/%.s FORCE_DO_CMD
@$(call do_cmd,cc,1)
ifeq ($(strip $(foreach prefix,$(NO_LOAD),\
$(findstring $(join ^,$(prefix)),\
$(join ^,bson.target.mk)))),)
include bson.target.mk
endif
quiet_cmd_regen_makefile = ACTION Regenerating $@
cmd_regen_makefile = cd $(srcdir); /usr/local/lib/node_modules/npm/node_modules/node-gyp/gyp/gyp_main.py -fmake --ignore-environment "--toplevel-dir=." -I/home/mitchell/Desktop/myapp/node_modules/mongoose/node_modules/bson/node_modules/bson-ext/build/config.gypi -I/usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi -I/home/mitchell/.node-gyp/0.12.4/common.gypi "--depth=." "-Goutput_dir=." "--generator-output=build" "-Dlibrary=shared_library" "-Dvisibility=default" "-Dnode_root_dir=/home/mitchell/.node-gyp/0.12.4" "-Dmodule_root_dir=/home/mitchell/Desktop/myapp/node_modules/mongoose/node_modules/bson/node_modules/bson-ext" binding.gyp
Makefile: $(srcdir)/../../../../../../../../.node-gyp/0.12.4/common.gypi $(srcdir)/build/config.gypi $(srcdir)/binding.gyp $(srcdir)/../../../../../../../../../../usr/local/lib/node_modules/npm/node_modules/node-gyp/addon.gypi
$(call do_cmd,regen_makefile)
# "all" is a concatenation of the "all" targets from all the included
# sub-makefiles. This is just here to clarify.
all:
# Add in dependency-tracking rules. $(all_deps) is the list of every single
# target in our tree. Only consider the ones with .d (dependency) info:
d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
ifneq ($(d_files),)
include $(d_files)
endif

View File

@@ -1 +0,0 @@
cmd_Release/bson.node := rm -rf "Release/bson.node" && cp -af "Release/obj.target/bson.node" "Release/bson.node"

View File

@@ -1 +0,0 @@
cmd_Release/obj.target/bson.node := flock ./Release/linker.lock g++ -shared -pthread -rdynamic -m64 -Wl,-soname=bson.node -o Release/obj.target/bson.node -Wl,--start-group Release/obj.target/bson/ext/bson.o -Wl,--end-group

View File

@@ -1,42 +0,0 @@
cmd_Release/obj.target/bson/ext/bson.o := g++ '-D_LARGEFILE_SOURCE' '-D_FILE_OFFSET_BITS=64' '-DBUILDING_NODE_EXTENSION' -I/home/mitchell/.node-gyp/0.12.4/src -I/home/mitchell/.node-gyp/0.12.4/deps/uv/include -I/home/mitchell/.node-gyp/0.12.4/deps/v8/include -I../node_modules/nan -fPIC -pthread -Wall -Wextra -Wno-unused-parameter -m64 -O3 -ffunction-sections -fdata-sections -fno-tree-vrp -fno-omit-frame-pointer -fno-rtti -MMD -MF ./Release/.deps/Release/obj.target/bson/ext/bson.o.d.raw -c -o Release/obj.target/bson/ext/bson.o ../ext/bson.cc
Release/obj.target/bson/ext/bson.o: ../ext/bson.cc \
/home/mitchell/.node-gyp/0.12.4/deps/v8/include/v8.h \
/home/mitchell/.node-gyp/0.12.4/deps/v8/include/v8stdint.h \
/home/mitchell/.node-gyp/0.12.4/deps/v8/include/v8config.h \
/home/mitchell/.node-gyp/0.12.4/src/node.h \
/home/mitchell/.node-gyp/0.12.4/src/node_version.h \
/home/mitchell/.node-gyp/0.12.4/src/node_version.h \
/home/mitchell/.node-gyp/0.12.4/src/node_buffer.h \
/home/mitchell/.node-gyp/0.12.4/src/node.h \
/home/mitchell/.node-gyp/0.12.4/src/smalloc.h ../ext/bson.h \
/home/mitchell/.node-gyp/0.12.4/src/node_object_wrap.h \
../node_modules/nan/nan.h \
/home/mitchell/.node-gyp/0.12.4/deps/uv/include/uv.h \
/home/mitchell/.node-gyp/0.12.4/deps/uv/include/uv-errno.h \
/home/mitchell/.node-gyp/0.12.4/deps/uv/include/uv-version.h \
/home/mitchell/.node-gyp/0.12.4/deps/uv/include/uv-unix.h \
/home/mitchell/.node-gyp/0.12.4/deps/uv/include/uv-threadpool.h \
/home/mitchell/.node-gyp/0.12.4/deps/uv/include/uv-linux.h \
../node_modules/nan/nan_new.h \
../node_modules/nan/nan_implementation_12_inl.h
../ext/bson.cc:
/home/mitchell/.node-gyp/0.12.4/deps/v8/include/v8.h:
/home/mitchell/.node-gyp/0.12.4/deps/v8/include/v8stdint.h:
/home/mitchell/.node-gyp/0.12.4/deps/v8/include/v8config.h:
/home/mitchell/.node-gyp/0.12.4/src/node.h:
/home/mitchell/.node-gyp/0.12.4/src/node_version.h:
/home/mitchell/.node-gyp/0.12.4/src/node_version.h:
/home/mitchell/.node-gyp/0.12.4/src/node_buffer.h:
/home/mitchell/.node-gyp/0.12.4/src/node.h:
/home/mitchell/.node-gyp/0.12.4/src/smalloc.h:
../ext/bson.h:
/home/mitchell/.node-gyp/0.12.4/src/node_object_wrap.h:
../node_modules/nan/nan.h:
/home/mitchell/.node-gyp/0.12.4/deps/uv/include/uv.h:
/home/mitchell/.node-gyp/0.12.4/deps/uv/include/uv-errno.h:
/home/mitchell/.node-gyp/0.12.4/deps/uv/include/uv-version.h:
/home/mitchell/.node-gyp/0.12.4/deps/uv/include/uv-unix.h:
/home/mitchell/.node-gyp/0.12.4/deps/uv/include/uv-threadpool.h:
/home/mitchell/.node-gyp/0.12.4/deps/uv/include/uv-linux.h:
../node_modules/nan/nan_new.h:
../node_modules/nan/nan_implementation_12_inl.h:

View File

@@ -1,6 +0,0 @@
# This file is generated by gyp; do not edit.
export builddir_name ?= ./build/.
.PHONY: all
all:
$(MAKE) bson

View File

@@ -1,131 +0,0 @@
# This file is generated by gyp; do not edit.
TOOLSET := target
TARGET := bson
DEFS_Debug := \
'-D_LARGEFILE_SOURCE' \
'-D_FILE_OFFSET_BITS=64' \
'-DBUILDING_NODE_EXTENSION' \
'-DDEBUG' \
'-D_DEBUG'
# Flags passed to all source files.
CFLAGS_Debug := \
-fPIC \
-pthread \
-Wall \
-Wextra \
-Wno-unused-parameter \
-m64 \
-g \
-O0
# Flags passed to only C files.
CFLAGS_C_Debug :=
# Flags passed to only C++ files.
CFLAGS_CC_Debug := \
-fno-rtti
INCS_Debug := \
-I/home/mitchell/.node-gyp/0.12.4/src \
-I/home/mitchell/.node-gyp/0.12.4/deps/uv/include \
-I/home/mitchell/.node-gyp/0.12.4/deps/v8/include \
-I$(srcdir)/node_modules/nan
DEFS_Release := \
'-D_LARGEFILE_SOURCE' \
'-D_FILE_OFFSET_BITS=64' \
'-DBUILDING_NODE_EXTENSION'
# Flags passed to all source files.
CFLAGS_Release := \
-fPIC \
-pthread \
-Wall \
-Wextra \
-Wno-unused-parameter \
-m64 \
-O3 \
-ffunction-sections \
-fdata-sections \
-fno-tree-vrp \
-fno-omit-frame-pointer
# Flags passed to only C files.
CFLAGS_C_Release :=
# Flags passed to only C++ files.
CFLAGS_CC_Release := \
-fno-rtti
INCS_Release := \
-I/home/mitchell/.node-gyp/0.12.4/src \
-I/home/mitchell/.node-gyp/0.12.4/deps/uv/include \
-I/home/mitchell/.node-gyp/0.12.4/deps/v8/include \
-I$(srcdir)/node_modules/nan
OBJS := \
$(obj).target/$(TARGET)/ext/bson.o
# Add to the list of files we specially track dependencies for.
all_deps += $(OBJS)
# CFLAGS et al overrides must be target-local.
# See "Target-specific Variable Values" in the GNU Make manual.
$(OBJS): TOOLSET := $(TOOLSET)
$(OBJS): GYP_CFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_C_$(BUILDTYPE))
$(OBJS): GYP_CXXFLAGS := $(DEFS_$(BUILDTYPE)) $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
# Suffix rules, putting all outputs into $(obj).
$(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
@$(call do_cmd,cxx,1)
# Try building from generated source, too.
$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj).$(TOOLSET)/%.cc FORCE_DO_CMD
@$(call do_cmd,cxx,1)
$(obj).$(TOOLSET)/$(TARGET)/%.o: $(obj)/%.cc FORCE_DO_CMD
@$(call do_cmd,cxx,1)
# End of this set of suffix rules
### Rules for final target.
LDFLAGS_Debug := \
-pthread \
-rdynamic \
-m64
LDFLAGS_Release := \
-pthread \
-rdynamic \
-m64
LIBS :=
$(obj).target/bson.node: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))
$(obj).target/bson.node: LIBS := $(LIBS)
$(obj).target/bson.node: TOOLSET := $(TOOLSET)
$(obj).target/bson.node: $(OBJS) FORCE_DO_CMD
$(call do_cmd,solink_module)
all_deps += $(obj).target/bson.node
# Add target alias
.PHONY: bson
bson: $(builddir)/bson.node
# Copy this to the executable output path.
$(builddir)/bson.node: TOOLSET := $(TOOLSET)
$(builddir)/bson.node: $(obj).target/bson.node FORCE_DO_CMD
$(call do_cmd,copy)
all_deps += $(builddir)/bson.node
# Short alias for building this executable.
.PHONY: bson.node
bson.node: $(obj).target/bson.node $(builddir)/bson.node
# Add executable to "all" target.
.PHONY: all
all: $(builddir)/bson.node

View File

@@ -1,135 +0,0 @@
# Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"clang": 0,
"gcc_version": 48,
"host_arch": "x64",
"icu_small": "false",
"node_install_npm": "true",
"node_prefix": "",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_v8": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_use_dtrace": "false",
"node_use_etw": "false",
"node_use_mdb": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"openssl_no_asm": 0,
"python": "/usr/bin/python",
"target_arch": "x64",
"uv_library": "static_library",
"uv_parent_path": "/deps/uv/",
"uv_use_dtrace": "false",
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 0,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 0,
"v8_random_seed": 0,
"v8_use_snapshot": "true",
"want_separate_host_toolset": 0,
"nodedir": "/home/mitchell/.node-gyp/0.12.4",
"copy_dev_lib": "true",
"standalone_static_library": 1,
"fallback_to_build": "true",
"module": "/home/mitchell/Desktop/myapp/node_modules/mongoose/node_modules/bson/node_modules/bson-ext/build/Release/bson.node",
"module_name": "bson",
"module_path": "/home/mitchell/Desktop/myapp/node_modules/mongoose/node_modules/bson/node_modules/bson-ext/build/Release",
"cache_lock_stale": "60000",
"sign_git_tag": "",
"user_agent": "npm/2.10.1 node/v0.12.4 linux x64",
"always_auth": "",
"bin_links": "true",
"key": "",
"description": "true",
"fetch_retries": "2",
"heading": "npm",
"if_present": "",
"init_version": "1.0.0",
"user": "",
"force": "",
"cache_min": "10",
"init_license": "ISC",
"editor": "vi",
"rollback": "true",
"tag_version_prefix": "v",
"cache_max": "Infinity",
"userconfig": "/home/mitchell/.npmrc",
"engine_strict": "",
"init_author_name": "",
"init_author_url": "",
"tmp": "/tmp",
"depth": "Infinity",
"save_dev": "",
"usage": "",
"cafile": "",
"https_proxy": "",
"onload_script": "",
"rebuild_bundle": "true",
"save_bundle": "",
"shell": "/bin/bash",
"prefix": "/usr/local",
"browser": "",
"cache_lock_wait": "10000",
"registry": "https://registry.npmjs.org/",
"save_optional": "",
"scope": "",
"searchopts": "",
"versions": "",
"cache": "/home/mitchell/.npm",
"ignore_scripts": "",
"searchsort": "name",
"version": "",
"local_address": "",
"viewer": "man",
"color": "true",
"fetch_retry_mintimeout": "10000",
"umask": "0002",
"fetch_retry_maxtimeout": "60000",
"message": "%s",
"ca": "",
"cert": "",
"global": "",
"link": "",
"access": "",
"save": "",
"unicode": "true",
"long": "",
"production": "",
"unsafe_perm": "true",
"node_version": "0.12.4",
"tag": "latest",
"git_tag_version": "true",
"shrinkwrap": "true",
"fetch_retry_factor": "10",
"npat": "",
"proprietary_attribs": "true",
"save_exact": "",
"strict_ssl": "true",
"dev": "",
"globalconfig": "/usr/local/etc/npmrc",
"init_module": "/home/mitchell/.npm-init.js",
"parseable": "",
"globalignorefile": "/usr/local/etc/npmignore",
"cache_lock_retries": "10",
"save_prefix": "^",
"group": "1000",
"init_author_email": "",
"searchexclude": "",
"git": "git",
"optional": "true",
"json": "",
"spin": "true"
}
}

View File

@@ -1,3 +0,0 @@
build/
./bson.node
.lock-wscript

View File

@@ -1,28 +0,0 @@
NODE = node
name = all
JOBS = 1
all:
rm -rf build .lock-wscript bson.node
node-waf configure build
cp -R ./build/Release/bson.node . || true
all_debug:
rm -rf build .lock-wscript bson.node
node-waf --debug configure build
cp -R ./build/Release/bson.node . || true
clang:
rm -rf build .lock-wscript bson.node
CXX=clang node-waf configure build
cp -R ./build/Release/bson.node . || true
clang_debug:
rm -rf build .lock-wscript bson.node
CXX=clang node-waf --debug configure build
cp -R ./build/Release/bson.node . || true
clean:
rm -rf build .lock-wscript bson.node
.PHONY: all

File diff suppressed because it is too large

View File

@@ -1,374 +0,0 @@
//===========================================================================
#ifndef BSON_H_
#define BSON_H_
//===========================================================================
#ifdef __arm__
#define USE_MISALIGNED_MEMORY_ACCESS 0
#else
#define USE_MISALIGNED_MEMORY_ACCESS 1
#endif
#include <node.h>
#include <node_object_wrap.h>
#include <v8.h>
#include "nan.h"
using namespace v8;
using namespace node;
//===========================================================================
enum BsonType
{
BSON_TYPE_NUMBER = 1,
BSON_TYPE_STRING = 2,
BSON_TYPE_OBJECT = 3,
BSON_TYPE_ARRAY = 4,
BSON_TYPE_BINARY = 5,
BSON_TYPE_UNDEFINED = 6,
BSON_TYPE_OID = 7,
BSON_TYPE_BOOLEAN = 8,
BSON_TYPE_DATE = 9,
BSON_TYPE_NULL = 10,
BSON_TYPE_REGEXP = 11,
BSON_TYPE_CODE = 13,
BSON_TYPE_SYMBOL = 14,
BSON_TYPE_CODE_W_SCOPE = 15,
BSON_TYPE_INT = 16,
BSON_TYPE_TIMESTAMP = 17,
BSON_TYPE_LONG = 18,
BSON_TYPE_MAX_KEY = 0x7f,
BSON_TYPE_MIN_KEY = 0xff
};
//===========================================================================
template<typename T> class BSONSerializer;
class BSON : public ObjectWrap {
public:
BSON();
~BSON() {}
static void Initialize(Handle<Object> target);
static NAN_METHOD(BSONDeserializeStream);
// JS based objects
static NAN_METHOD(BSONSerialize);
static NAN_METHOD(BSONDeserialize);
// Calculate size of function
static NAN_METHOD(CalculateObjectSize);
static NAN_METHOD(SerializeWithBufferAndIndex);
// Constructor used for creating new BSON objects from C++
static Persistent<FunctionTemplate> constructor_template;
public:
Persistent<Object> buffer;
size_t maxBSONSize;
private:
static NAN_METHOD(New);
static Handle<Value> deserialize(BSON *bson, char *data, uint32_t dataLength, uint32_t startIndex, bool is_array_item);
// BSON type instantiate functions
Persistent<Function> longConstructor;
Persistent<Function> objectIDConstructor;
Persistent<Function> binaryConstructor;
Persistent<Function> codeConstructor;
Persistent<Function> dbrefConstructor;
Persistent<Function> symbolConstructor;
Persistent<Function> doubleConstructor;
Persistent<Function> timestampConstructor;
Persistent<Function> minKeyConstructor;
Persistent<Function> maxKeyConstructor;
// Equality Objects
Persistent<String> longString;
Persistent<String> objectIDString;
Persistent<String> binaryString;
Persistent<String> codeString;
Persistent<String> dbrefString;
Persistent<String> symbolString;
Persistent<String> doubleString;
Persistent<String> timestampString;
Persistent<String> minKeyString;
Persistent<String> maxKeyString;
// Equality speed up comparison objects
Persistent<String> _bsontypeString;
Persistent<String> _longLowString;
Persistent<String> _longHighString;
Persistent<String> _objectIDidString;
Persistent<String> _binaryPositionString;
Persistent<String> _binarySubTypeString;
Persistent<String> _binaryBufferString;
Persistent<String> _doubleValueString;
Persistent<String> _symbolValueString;
Persistent<String> _dbRefRefString;
Persistent<String> _dbRefIdRefString;
Persistent<String> _dbRefDbRefString;
Persistent<String> _dbRefNamespaceString;
Persistent<String> _dbRefDbString;
Persistent<String> _dbRefOidString;
Persistent<String> _codeCodeString;
Persistent<String> _codeScopeString;
Persistent<String> _toBSONString;
Local<Object> GetSerializeObject(const Handle<Value>& object);
template<typename T> friend class BSONSerializer;
friend class BSONDeserializer;
};
//===========================================================================
class CountStream
{
public:
CountStream() : count(0) { }
void WriteByte(int value) { ++count; }
void WriteByte(const Handle<Object>&, const Handle<String>&) { ++count; }
void WriteBool(const Handle<Value>& value) { ++count; }
void WriteInt32(int32_t value) { count += 4; }
void WriteInt32(const Handle<Value>& value) { count += 4; }
void WriteInt32(const Handle<Object>& object, const Handle<String>& key) { count += 4; }
void WriteInt64(int64_t value) { count += 8; }
void WriteInt64(const Handle<Value>& value) { count += 8; }
void WriteDouble(double value) { count += 8; }
void WriteDouble(const Handle<Value>& value) { count += 8; }
void WriteDouble(const Handle<Object>&, const Handle<String>&) { count += 8; }
void WriteUInt32String(uint32_t name) { char buffer[32]; count += sprintf(buffer, "%u", name) + 1; }
void WriteLengthPrefixedString(const Local<String>& value) { count += value->Utf8Length()+5; }
void WriteObjectId(const Handle<Object>& object, const Handle<String>& key) { count += 12; }
void WriteString(const Local<String>& value) { count += value->Utf8Length() + 1; } // This returns the number of bytes exclusive of the NULL terminator
void WriteData(const char* data, size_t length) { count += length; }
void* BeginWriteType() { ++count; return NULL; }
void CommitType(void*, BsonType) { }
void* BeginWriteSize() { count += 4; return NULL; }
void CommitSize(void*) { }
size_t GetSerializeSize() const { return count; }
// Do nothing. CheckKey is implemented for DataStream
void CheckKey(const Local<String>&) { }
private:
size_t count;
};
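CountStream and the DataStream below form a classic two-pass serializer: pass one measures the exact output size, pass two writes into a buffer allocated to that size, so no reallocation or bounds growth is needed mid-write. The same shape in a few lines of JavaScript (illustrative only):
// Pass 1 counts bytes, pass 2 emits them into an exactly-sized buffer
function writeInts(values, stream) {
  for(var i = 0; i < values.length; i++) stream.writeInt32(values[i]);
}
function serializeInts(values) {
  var count = 0;
  writeInts(values, { writeInt32: function() { count += 4; } }); // measure
  var buffer = new Buffer(count), pos = 0; // exact allocation (Node 0.x Buffer API)
  writeInts(values, { writeInt32: function(v) { buffer.writeInt32LE(v, pos); pos += 4; } }); // emit
  return buffer;
}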
const size_t MAX_BSON_SIZE (1024*1024*17);
class DataStream
{
public:
DataStream(char* aDestinationBuffer) : destinationBuffer(aDestinationBuffer), p(aDestinationBuffer) { }
void WriteByte(int value) {
if((size_t)((p + 1) - destinationBuffer) > MAX_BSON_SIZE) throw "document is larger than max bson document size of 16MB";
*p++ = value;
}
void WriteByte(const Handle<Object>& object, const Handle<String>& key) {
if((size_t)((p + 1) - destinationBuffer) > MAX_BSON_SIZE) throw "document is larger than max bson document size of 16MB";
*p++ = object->Get(key)->Int32Value();
}
#if USE_MISALIGNED_MEMORY_ACCESS
void WriteInt32(int32_t value) {
if((size_t)((p + 4) - destinationBuffer) > MAX_BSON_SIZE) throw "document is larger than max bson document size of 16MB";
*reinterpret_cast<int32_t*>(p) = value;
p += 4;
}
void WriteInt64(int64_t value) {
if((size_t)((p + 8) - destinationBuffer) > MAX_BSON_SIZE) throw "document is larger than max bson document size of 16MB";
*reinterpret_cast<int64_t*>(p) = value;
p += 8;
}
void WriteDouble(double value) {
if((size_t)((p + 8) - destinationBuffer) > MAX_BSON_SIZE) throw "document is larger than max bson document size of 16MB";
*reinterpret_cast<double*>(p) = value;
p += 8;
}
#else
void WriteInt32(int32_t value) {
if((size_t)((p + 4) - destinationBuffer) > MAX_BSON_SIZE) throw "document is larger than max bson document size of 16MB";
memcpy(p, &value, 4);
p += 4;
}
void WriteInt64(int64_t value) {
if((size_t)((p + 8) - destinationBuffer) > MAX_BSON_SIZE) throw "document is larger than max bson document size of 16MB";
memcpy(p, &value, 8);
p += 8;
}
void WriteDouble(double value) {
if((size_t)((p + 8) - destinationBuffer) > MAX_BSON_SIZE) throw "document is larger than max bson document size of 16MB";
memcpy(p, &value, 8);
p += 8;
}
#endif
void WriteBool(const Handle<Value>& value) {
WriteByte(value->BooleanValue() ? 1 : 0);
}
void WriteInt32(const Handle<Value>& value) {
WriteInt32(value->Int32Value());
}
void WriteInt32(const Handle<Object>& object, const Handle<String>& key) {
WriteInt32(object->Get(key));
}
void WriteInt64(const Handle<Value>& value) {
WriteInt64(value->IntegerValue());
}
void WriteDouble(const Handle<Value>& value) {
WriteDouble(value->NumberValue());
}
void WriteDouble(const Handle<Object>& object, const Handle<String>& key) {
WriteDouble(object->Get(key));
}
void WriteUInt32String(uint32_t name) {
p += sprintf(p, "%u", name) + 1;
}
void WriteLengthPrefixedString(const Local<String>& value) {
int32_t length = value->Utf8Length()+1;
if((size_t)((p + length) - destinationBuffer) > MAX_BSON_SIZE) throw "document is larger than max bson document size of 16MB";
WriteInt32(length);
WriteString(value);
}
void WriteObjectId(const Handle<Object>& object, const Handle<String>& key);
void WriteString(const Local<String>& value) {
if((size_t)(p - destinationBuffer) > MAX_BSON_SIZE) throw "document is larger than max bson document size of 16MB";
p += value->WriteUtf8(p);
} // WriteUtf8 returns the number of bytes written, inclusive of the NULL terminator.
void WriteData(const char* data, size_t length) {
if((size_t)((p + length) - destinationBuffer) > MAX_BSON_SIZE) throw "document is larger than max bson document size of 16MB";
memcpy(p, data, length);
p += length;
}
void* BeginWriteType() {
void* returnValue = p; p++;
return returnValue;
}
void CommitType(void* beginPoint, BsonType value) {
*reinterpret_cast<unsigned char*>(beginPoint) = value;
}
void* BeginWriteSize() {
if((size_t)(p - destinationBuffer) > MAX_BSON_SIZE) throw "document is larger than max bson document size of 16MB";
void* returnValue = p; p += 4;
return returnValue;
}
#if USE_MISALIGNED_MEMORY_ACCESS
void CommitSize(void* beginPoint) {
*reinterpret_cast<int32_t*>(beginPoint) = (int32_t) (p - (char*) beginPoint);
}
#else
void CommitSize(void* beginPoint) {
int32_t value = (int32_t) (p - (char*) beginPoint);
memcpy(beginPoint, &value, 4);
}
#endif
size_t GetSerializeSize() const {
return p - destinationBuffer;
}
void CheckKey(const Local<String>& keyName);
public:
char *const destinationBuffer; // base, never changes
char* p; // cursor into buffer
};
template<typename T> class BSONSerializer : public T
{
private:
typedef T Inherited;
public:
BSONSerializer(BSON* aBson, bool aCheckKeys, bool aSerializeFunctions) : Inherited(), checkKeys(aCheckKeys), serializeFunctions(aSerializeFunctions), bson(aBson) { }
BSONSerializer(BSON* aBson, bool aCheckKeys, bool aSerializeFunctions, char* parentParam) : Inherited(parentParam), checkKeys(aCheckKeys), serializeFunctions(aSerializeFunctions), bson(aBson) { }
void SerializeDocument(const Handle<Value>& value);
void SerializeArray(const Handle<Value>& value);
void SerializeValue(void* typeLocation, const Handle<Value> value);
private:
bool checkKeys;
bool serializeFunctions;
BSON* bson;
};
//===========================================================================
class BSONDeserializer
{
public:
BSONDeserializer(BSON* aBson, char* data, size_t length);
BSONDeserializer(BSONDeserializer& parentSerializer, size_t length);
Handle<Value> DeserializeDocument(bool promoteLongs);
bool HasMoreData() const { return p < pEnd; }
Handle<Value> ReadCString();
uint32_t ReadIntegerString();
int32_t ReadRegexOptions();
Local<String> ReadString();
Local<String> ReadObjectId();
unsigned char ReadByte() { return *reinterpret_cast<unsigned char*>(p++); }
#if USE_MISALIGNED_MEMORY_ACCESS
int32_t ReadInt32() { int32_t returnValue = *reinterpret_cast<int32_t*>(p); p += 4; return returnValue; }
uint32_t ReadUInt32() { uint32_t returnValue = *reinterpret_cast<uint32_t*>(p); p += 4; return returnValue; }
int64_t ReadInt64() { int64_t returnValue = *reinterpret_cast<int64_t*>(p); p += 8; return returnValue; }
double ReadDouble() { double returnValue = *reinterpret_cast<double*>(p); p += 8; return returnValue; }
#else
int32_t ReadInt32() { int32_t returnValue; memcpy(&returnValue, p, 4); p += 4; return returnValue; }
uint32_t ReadUInt32() { uint32_t returnValue; memcpy(&returnValue, p, 4); p += 4; return returnValue; }
int64_t ReadInt64() { int64_t returnValue; memcpy(&returnValue, p, 8); p += 8; return returnValue; }
double ReadDouble() { double returnValue; memcpy(&returnValue, p, 8); p += 8; return returnValue; }
#endif
size_t GetSerializeSize() const { return p - pStart; }
private:
Handle<Value> DeserializeArray(bool promoteLongs);
Handle<Value> DeserializeValue(BsonType type, bool promoteLongs);
Handle<Value> DeserializeDocumentInternal(bool promoteLongs);
Handle<Value> DeserializeArrayInternal(bool promoteLongs);
BSON* bson;
char* const pStart;
char* p;
char* const pEnd;
};
//===========================================================================
#endif // BSON_H_
//===========================================================================
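The CountStream/DataStream pair above is a classic two-pass serializer: a first pass walks the document through CountStream to tally the exact byte length, a buffer of that size is allocated, and a second pass replays the identical WriteXxx call sequence through DataStream to fill it. A minimal sketch of the same pattern in plain JavaScript (the stream and function names are illustrative, not this module's actual code; the 0x10 tag mirrors BSON's int32 element type):

// Pass 1 counts bytes, pass 2 writes them; both expose the same interface.
function CountStream() { this.count = 0; }
CountStream.prototype.writeByte  = function()  { this.count += 1; };
CountStream.prototype.writeInt32 = function()  { this.count += 4; };

function DataStream(buffer) { this.buffer = buffer; this.p = 0; }
DataStream.prototype.writeByte  = function(v) { this.buffer[this.p++] = v; };
DataStream.prototype.writeInt32 = function(v) { this.buffer.writeInt32LE(v, this.p); this.p += 4; };

// One shared traversal drives both passes in exactly the same order.
function emit(stream, values) {
  for(var i = 0; i < values.length; i++) {
    stream.writeByte(0x10);       // element type tag
    stream.writeInt32(values[i]); // payload
  }
}

var counter = new CountStream();
emit(counter, [1, 2, 3]);
var out = new Buffer(counter.count); // sized exactly by the first pass
emit(new DataStream(out), [1, 2, 3]);

Because a single traversal routine feeds both streams, the size computed in pass one is guaranteed to match the bytes emitted in pass two.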


@@ -1,29 +0,0 @@
var bson = null;
try {
// Load the precompiled win32 binary
if(process.platform == "win32" && process.arch == "x64") {
bson = require('./win32/x64/bson');
} else if(process.platform == "win32" && process.arch == "ia32") {
bson = require('./win32/ia32/bson');
} else {
bson = require('../build/Release/bson');
}
} catch(err) {
// Attempt to load the release bson version
try {
bson = require('../build/Release/bson');
} catch (err) {
throw new Error("js-bson: Failed to load c++ bson extension, using pure JS version");
}
}
exports.BSON = bson.BSON;
// Just add constants to the native BSON parser
exports.BSON.BSON_BINARY_SUBTYPE_DEFAULT = 0;
exports.BSON.BSON_BINARY_SUBTYPE_FUNCTION = 1;
exports.BSON.BSON_BINARY_SUBTYPE_BYTE_ARRAY = 2;
exports.BSON.BSON_BINARY_SUBTYPE_UUID = 3;
exports.BSON.BSON_BINARY_SUBTYPE_MD5 = 4;
exports.BSON.BSON_BINARY_SUBTYPE_USER_DEFINED = 128;
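The loader above prefers a prebuilt win32 binary, then a locally compiled Release build, and finally rethrows with a descriptive message. Packages that bundle both parsers typically catch that error and substitute the pure-JS implementation; a hedged sketch of that consumer side (the './pure' path is hypothetical):

var BSON;
try {
  BSON = require('./ext').BSON;  // native C++ parser, when the addon compiled
} catch(err) {
  BSON = require('./pure').BSON; // hypothetical pure-JS fallback module
}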


@@ -1,39 +0,0 @@
import Options
from os import unlink, symlink, popen
from os.path import exists
srcdir = "."
blddir = "build"
VERSION = "0.1.0"
def set_options(opt):
opt.tool_options("compiler_cxx")
opt.add_option( '--debug'
, action='store_true'
, default=False
, help='Build debug variant [Default: False]'
, dest='debug'
)
def configure(conf):
conf.check_tool("compiler_cxx")
conf.check_tool("node_addon")
conf.env.append_value('CXXFLAGS', ['-O3', '-funroll-loops'])
# conf.env.append_value('CXXFLAGS', ['-DDEBUG', '-g', '-O0', '-Wall', '-Wextra'])
# conf.check(lib='node', libpath=['/usr/lib', '/usr/local/lib'], uselib_store='NODE')
def build(bld):
obj = bld.new_task_gen("cxx", "shlib", "node_addon")
obj.target = "bson"
obj.source = ["bson.cc"]
# obj.uselib = "NODE"
def shutdown():
# HACK to get bson.node out of the build directory.
# better way to do this?
if Options.commands['clean']:
if exists('bson.node'): unlink('bson.node')
else:
if exists('build/default/bson.node') and not exists('bson.node'):
symlink('build/default/bson.node', 'bson.node')


@@ -1,14 +0,0 @@
var BSON = require('./ext').BSON;
// BSON MAX VALUES
BSON.BSON_INT32_MAX = 0x7FFFFFFF;
BSON.BSON_INT32_MIN = -0x80000000;
BSON.BSON_INT64_MAX = Math.pow(2, 63) - 1;
BSON.BSON_INT64_MIN = -Math.pow(2, 63);
// JS MAX PRECISE VALUES
BSON.JS_INT_MAX = 0x20000000000000; // Any integer up to 2^53 can be precisely represented by a double.
BSON.JS_INT_MIN = -0x20000000000000; // Any integer down to -2^53 can be precisely represented by a double.
module.exports = BSON;
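The JS_INT_MAX/JS_INT_MIN comments above follow from IEEE 754: a double carries 53 bits of integer precision, so 2^53 is the first integer it cannot distinguish from its neighbour. A quick check of that boundary (plain Node.js, nothing from this module required):

var max = 0x20000000000000;    // 2^53, the value of BSON.JS_INT_MAX
console.log(max - 1 === max);  // false - integers below 2^53 stay exact
console.log(max + 1 === max);  // true  - 2^53 + 1 is not representable

This boundary is why the deserializer carries a promoteLongs flag: 64-bit values beyond it cannot be returned as plain numbers without silent corruption.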


@@ -1,344 +0,0 @@
/**
* Module dependencies.
* @ignore
*/
if(typeof window === 'undefined') {
var Buffer = require('buffer').Buffer; // TODO just use global Buffer
}
/**
* A class representation of the BSON Binary type.
*
* Sub types
* - **BSON.BSON_BINARY_SUBTYPE_DEFAULT**, default BSON type.
* - **BSON.BSON_BINARY_SUBTYPE_FUNCTION**, BSON function type.
* - **BSON.BSON_BINARY_SUBTYPE_BYTE_ARRAY**, BSON byte array type.
* - **BSON.BSON_BINARY_SUBTYPE_UUID**, BSON uuid type.
* - **BSON.BSON_BINARY_SUBTYPE_MD5**, BSON md5 type.
* - **BSON.BSON_BINARY_SUBTYPE_USER_DEFINED**, BSON user defined type.
*
* @class
* @param {Buffer} buffer a buffer object containing the binary data.
* @param {Number} [subType] the optional binary type.
* @return {Binary}
*/
function Binary(buffer, subType) {
if(!(this instanceof Binary)) return new Binary(buffer, subType);
this._bsontype = 'Binary';
if(buffer instanceof Number) {
this.sub_type = buffer;
this.position = 0;
} else {
this.sub_type = subType == null ? BSON_BINARY_SUBTYPE_DEFAULT : subType;
this.position = 0;
}
if(buffer != null && !(buffer instanceof Number)) {
// Only accept Buffer, Uint8Array or Arrays
if(typeof buffer == 'string') {
// Different ways of writing the length of the string for the different types
if(typeof Buffer != 'undefined') {
this.buffer = new Buffer(buffer);
} else if(typeof Uint8Array != 'undefined' || (Object.prototype.toString.call(buffer) == '[object Array]')) {
this.buffer = writeStringToArray(buffer);
} else {
throw new Error("only String, Buffer, Uint8Array or Array accepted");
}
} else {
this.buffer = buffer;
}
this.position = buffer.length;
} else {
if(typeof Buffer != 'undefined') {
this.buffer = new Buffer(Binary.BUFFER_SIZE);
} else if(typeof Uint8Array != 'undefined'){
this.buffer = new Uint8Array(new ArrayBuffer(Binary.BUFFER_SIZE));
} else {
this.buffer = new Array(Binary.BUFFER_SIZE);
}
// Set position to start of buffer
this.position = 0;
}
};
/**
* Updates this binary with byte_value.
*
* @method
* @param {string} byte_value a single byte we wish to write.
*/
Binary.prototype.put = function put(byte_value) {
// If it's a string and has more than one character throw an error
if(byte_value['length'] != null && typeof byte_value != 'number' && byte_value.length != 1) throw new Error("only accepts single character String, Uint8Array or Array");
// Numbers must sit in the unsigned byte range
if(typeof byte_value == 'number' && (byte_value < 0 || byte_value > 255)) throw new Error("only accepts number in a valid unsigned byte range 0-255");
// Decode the byte value once
var decoded_byte = null;
if(typeof byte_value == 'string') {
decoded_byte = byte_value.charCodeAt(0);
} else if(byte_value['length'] != null) {
decoded_byte = byte_value[0];
} else {
decoded_byte = byte_value;
}
if(this.buffer.length > this.position) {
this.buffer[this.position++] = decoded_byte;
} else {
if(typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer)) {
// Create additional overflow buffer
var buffer = new Buffer(Binary.BUFFER_SIZE + this.buffer.length);
// Combine the two buffers together
this.buffer.copy(buffer, 0, 0, this.buffer.length);
this.buffer = buffer;
this.buffer[this.position++] = decoded_byte;
} else {
var buffer = null;
// Create a new buffer (typed or normal array)
if(Object.prototype.toString.call(this.buffer) == '[object Uint8Array]') {
buffer = new Uint8Array(new ArrayBuffer(Binary.BUFFER_SIZE + this.buffer.length));
} else {
buffer = new Array(Binary.BUFFER_SIZE + this.buffer.length);
}
// We need to copy all the content to the new array
for(var i = 0; i < this.buffer.length; i++) {
buffer[i] = this.buffer[i];
}
// Reassign the buffer
this.buffer = buffer;
// Write the byte
this.buffer[this.position++] = decoded_byte;
}
}
};
/**
* Writes a buffer or string to the binary.
*
* @method
* @param {(Buffer|string)} string a string or buffer to be written to the Binary BSON object.
* @param {number} offset the offset in the Binary at which to write the content.
* @return {null}
*/
Binary.prototype.write = function write(string, offset) {
offset = typeof offset == 'number' ? offset : this.position;
// If the buffer is too small let's extend it
if(this.buffer.length < offset + string.length) {
var buffer = null;
// If we are in node.js
if(typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer)) {
buffer = new Buffer(this.buffer.length + string.length);
this.buffer.copy(buffer, 0, 0, this.buffer.length);
} else if(Object.prototype.toString.call(this.buffer) == '[object Uint8Array]') {
// Create a new buffer
buffer = new Uint8Array(new ArrayBuffer(this.buffer.length + string.length))
// Copy the content
for(var i = 0; i < this.position; i++) {
buffer[i] = this.buffer[i];
}
}
// Assign the new buffer
this.buffer = buffer;
}
if(typeof Buffer != 'undefined' && Buffer.isBuffer(string) && Buffer.isBuffer(this.buffer)) {
string.copy(this.buffer, offset, 0, string.length);
this.position = (offset + string.length) > this.position ? (offset + string.length) : this.position;
// offset = string.length
} else if(typeof Buffer != 'undefined' && typeof string == 'string' && Buffer.isBuffer(this.buffer)) {
this.buffer.write(string, 'binary', offset);
this.position = (offset + string.length) > this.position ? (offset + string.length) : this.position;
// offset = string.length;
} else if(Object.prototype.toString.call(string) == '[object Uint8Array]'
|| Object.prototype.toString.call(string) == '[object Array]' && typeof string != 'string') {
for(var i = 0; i < string.length; i++) {
this.buffer[offset++] = string[i];
}
this.position = offset > this.position ? offset : this.position;
} else if(typeof string == 'string') {
for(var i = 0; i < string.length; i++) {
this.buffer[offset++] = string.charCodeAt(i);
}
this.position = offset > this.position ? offset : this.position;
}
};
/**
* Reads **length** bytes starting at **position**.
*
* @method
* @param {number} position read from the given position in the Binary.
* @param {number} length the number of bytes to read.
* @return {Buffer}
*/
Binary.prototype.read = function read(position, length) {
length = length && length > 0
? length
: this.position;
// Let's return the data based on the type we have
if(this.buffer['slice']) {
return this.buffer.slice(position, position + length);
} else {
// Create a buffer to keep the result
var buffer = typeof Uint8Array != 'undefined' ? new Uint8Array(new ArrayBuffer(length)) : new Array(length);
for(var i = 0; i < length; i++) {
buffer[i] = this.buffer[position++];
}
}
// Return the buffer
return buffer;
};
/**
* Returns the value of this binary as a string, or as the raw buffer when asRaw is set.
*
* @method
* @param {boolean} [asRaw] return the raw buffer rather than a binary string.
* @return {(string|Buffer)}
*/
Binary.prototype.value = function value(asRaw) {
asRaw = asRaw == null ? false : asRaw;
// Optimize to serialize for the situation where the data == size of buffer
if(asRaw && typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer) && this.buffer.length == this.position)
return this.buffer;
// If it's a node.js buffer object
if(typeof Buffer != 'undefined' && Buffer.isBuffer(this.buffer)) {
return asRaw ? this.buffer.slice(0, this.position) : this.buffer.toString('binary', 0, this.position);
} else {
if(asRaw) {
// we support the slice command use it
if(this.buffer['slice'] != null) {
return this.buffer.slice(0, this.position);
} else {
// Create a new buffer to copy content to
var newBuffer = Object.prototype.toString.call(this.buffer) == '[object Uint8Array]' ? new Uint8Array(new ArrayBuffer(this.position)) : new Array(this.position);
// Copy content
for(var i = 0; i < this.position; i++) {
newBuffer[i] = this.buffer[i];
}
// Return the buffer
return newBuffer;
}
} else {
return convertArraytoUtf8BinaryString(this.buffer, 0, this.position);
}
}
};
/**
* Length.
*
* @method
* @return {number} the length of the binary.
*/
Binary.prototype.length = function length() {
return this.position;
};
/**
* @ignore
*/
Binary.prototype.toJSON = function() {
return this.buffer != null ? this.buffer.toString('base64') : '';
}
/**
* @ignore
*/
Binary.prototype.toString = function(format) {
return this.buffer != null ? this.buffer.slice(0, this.position).toString(format) : '';
}
/**
* Binary default subtype
* @ignore
*/
var BSON_BINARY_SUBTYPE_DEFAULT = 0;
/**
* @ignore
*/
var writeStringToArray = function(data) {
// Create a buffer
var buffer = typeof Uint8Array != 'undefined' ? new Uint8Array(new ArrayBuffer(data.length)) : new Array(data.length);
// Write the content to the buffer
for(var i = 0; i < data.length; i++) {
buffer[i] = data.charCodeAt(i);
}
// Write the string to the buffer
return buffer;
}
/**
* Convert an Array or Uint8Array to a binary string
*
* @ignore
*/
var convertArraytoUtf8BinaryString = function(byteArray, startIndex, endIndex) {
var result = "";
for(var i = startIndex; i < endIndex; i++) {
result = result + String.fromCharCode(byteArray[i]);
}
return result;
};
Binary.BUFFER_SIZE = 256;
/**
* Default BSON type
*
* @classconstant SUBTYPE_DEFAULT
**/
Binary.SUBTYPE_DEFAULT = 0;
/**
* Function BSON type
*
* @classconstant SUBTYPE_FUNCTION
**/
Binary.SUBTYPE_FUNCTION = 1;
/**
* Byte Array BSON type
*
* @classconstant SUBTYPE_BYTE_ARRAY
**/
Binary.SUBTYPE_BYTE_ARRAY = 2;
/**
* OLD UUID BSON type
*
* @classconstant SUBTYPE_UUID_OLD
**/
Binary.SUBTYPE_UUID_OLD = 3;
/**
* UUID BSON type
*
* @classconstant SUBTYPE_UUID
**/
Binary.SUBTYPE_UUID = 4;
/**
* MD5 BSON type
*
* @classconstant SUBTYPE_MD5
**/
Binary.SUBTYPE_MD5 = 5;
/**
* User BSON type
*
* @classconstant SUBTYPE_USER_DEFINED
**/
Binary.SUBTYPE_USER_DEFINED = 128;
/**
* Expose.
*/
module.exports = Binary;
module.exports.Binary = Binary;
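A short usage sketch for the Binary class above, assuming the file is saved as binary.js next to the caller (paths and values are illustrative, pre-4.x Buffer API as used by the module itself):

var Binary = require('./binary');
var bin = new Binary(new Buffer('hello'), Binary.SUBTYPE_BYTE_ARRAY);
bin.put(0x21);                            // append one byte; grows the backing buffer on demand
bin.write(' world', 6);                   // write a string at an explicit offset
console.log(bin.length());                // 12 - the current write position
console.log(bin.read(0, 5).toString());   // 'hello'
console.log(bin.value());                 // 'hello! world' as a binary string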


@@ -1,274 +0,0 @@
/**
* Module dependencies.
* @ignore
*/
var BinaryParser = require('./binary_parser').BinaryParser;
/**
* Machine id.
*
* Create a random 3-byte value (i.e. unique for this
* process). Other drivers use an md5 of the machine id here, but
* that would mean an async call to gethostname, so we don't bother.
* @ignore
*/
var MACHINE_ID = parseInt(Math.random() * 0xFFFFFF, 10);
// Regular expression that checks for hex value
var checkForHexRegExp = new RegExp("^[0-9a-fA-F]{24}$");
/**
* Create a new ObjectID instance
*
* @class
* @param {(string|number)} id Can be a 24 byte hex string, 12 byte binary string or a Number.
* @property {number} generationTime The generation time of this ObjectId instance
* @return {ObjectID} instance of ObjectID.
*/
var ObjectID = function ObjectID(id) {
if(!(this instanceof ObjectID)) return new ObjectID(id);
if((id instanceof ObjectID)) return id;
this._bsontype = 'ObjectID';
var __id = null;
var valid = ObjectID.isValid(id);
// Throw an error if it's not a valid setup
if(!valid && id != null){
throw new Error("Argument passed in must be a single String of 12 bytes or a string of 24 hex characters");
} else if(valid && typeof id == 'string' && id.length == 24) {
return ObjectID.createFromHexString(id);
} else if(id == null || typeof id == 'number') {
// convert to 12 byte binary string
this.id = this.generate(id);
} else if(id != null && id.length === 12) {
// assume 12 byte string
this.id = id;
}
if(ObjectID.cacheHexString) this.__id = this.toHexString();
};
// Allow usage of ObjectId as well as ObjectID
var ObjectId = ObjectID;
// Precomputed hex table enables speedy hex string conversion
var hexTable = [];
for (var i = 0; i < 256; i++) {
hexTable[i] = (i <= 15 ? '0' : '') + i.toString(16);
}
/**
* Return the ObjectID id as a 24 byte hex string representation
*
* @method
* @return {string} return the 24 byte hex string representation.
*/
ObjectID.prototype.toHexString = function() {
if(ObjectID.cacheHexString && this.__id) return this.__id;
var hexString = '';
for (var i = 0; i < this.id.length; i++) {
hexString += hexTable[this.id.charCodeAt(i)];
}
if(ObjectID.cacheHexString) this.__id = hexString;
return hexString;
};
/**
* Update the ObjectID index used in generating new ObjectID's on the driver
*
* @method
* @return {number} returns next index value.
* @ignore
*/
ObjectID.prototype.get_inc = function() {
return ObjectID.index = (ObjectID.index + 1) % 0xFFFFFF;
};
/**
* Update the ObjectID index used in generating new ObjectID's on the driver
*
* @method
* @return {number} returns next index value.
* @ignore
*/
ObjectID.prototype.getInc = function() {
return this.get_inc();
};
/**
* Generate a 12 byte id string used in ObjectID's
*
* @method
* @param {number} [time] optional parameter allowing to pass in a second based timestamp.
* @return {string} return the 12 byte id binary string.
*/
ObjectID.prototype.generate = function(time) {
if ('number' != typeof time) {
time = parseInt(Date.now()/1000,10);
}
var time4Bytes = BinaryParser.encodeInt(time, 32, true, true);
/* for time-based ObjectID the bytes following the time will be zeroed */
var machine3Bytes = BinaryParser.encodeInt(MACHINE_ID, 24, false);
var pid2Bytes = BinaryParser.fromShort(typeof process === 'undefined' ? Math.floor(Math.random() * 100000) : process.pid % 0xFFFF);
var index3Bytes = BinaryParser.encodeInt(this.get_inc(), 24, false, true);
return time4Bytes + machine3Bytes + pid2Bytes + index3Bytes;
};
/**
* Converts the id into a 24 byte hex string for printing
*
* @return {String} return the 24 byte hex string representation.
* @ignore
*/
ObjectID.prototype.toString = function() {
return this.toHexString();
};
/**
* Converts to a string representation of this Id.
*
* @return {String} return the 24 byte hex string representation.
* @ignore
*/
ObjectID.prototype.inspect = ObjectID.prototype.toString;
/**
* Converts to its JSON representation.
*
* @return {String} return the 24 byte hex string representation.
* @ignore
*/
ObjectID.prototype.toJSON = function() {
return this.toHexString();
};
/**
* Compares the equality of this ObjectID with `otherID`.
*
* @method
* @param {object} otherID ObjectID instance to compare against.
* @return {boolean} the result of comparing two ObjectID's
*/
ObjectID.prototype.equals = function equals (otherID) {
if(otherID == null) return false;
if(otherID instanceof ObjectID || otherID.toHexString) return this.id === otherID.id;
// A 12 byte binary string compares directly
if(typeof otherID == 'string' && otherID.length === 12) return this.id === otherID;
// A 24 character hex string; invalid input compares unequal instead of throwing
if(typeof otherID == 'string' && otherID.length === 24 && checkForHexRegExp.test(otherID)) {
return this.id === ObjectID.createFromHexString(otherID).id;
}
return false;
}
/**
* Returns the generation date (accurate up to the second) that this ID was generated.
*
* @method
* @return {date} the generation date
*/
ObjectID.prototype.getTimestamp = function() {
var timestamp = new Date();
timestamp.setTime(Math.floor(BinaryParser.decodeInt(this.id.substring(0,4), 32, true, true)) * 1000);
return timestamp;
}
/**
* @ignore
*/
ObjectID.index = parseInt(Math.random() * 0xFFFFFF, 10);
/**
* @ignore
*/
ObjectID.createPk = function createPk () {
return new ObjectID();
};
/**
* Creates an ObjectID from a second based number, with the rest of the ObjectID zeroed out. Used for comparisons or sorting the ObjectID.
*
* @method
* @param {number} time an integer number representing a number of seconds.
* @return {ObjectID} return the created ObjectID
*/
ObjectID.createFromTime = function createFromTime (time) {
var id = BinaryParser.encodeInt(time, 32, true, true) +
BinaryParser.encodeInt(0, 64, true, true);
return new ObjectID(id);
};
/**
* Creates an ObjectID from a hex string representation of an ObjectID.
*
* @method
* @param {string} hexString create a ObjectID from a passed in 24 byte hexstring.
* @return {ObjectID} return the created ObjectID
*/
ObjectID.createFromHexString = function createFromHexString (hexString) {
// Throw an error if it's not a valid setup
if(hexString == null || hexString.length != 24)
throw new Error("Argument passed in must be a single String of 12 bytes or a string of 24 hex characters");
var len = hexString.length;
if(len > 12*2) {
throw new Error('Id cannot be longer than 12 bytes');
}
var result = ''
, string
, number;
for (var index = 0; index < len; index += 2) {
string = hexString.substr(index, 2);
number = parseInt(string, 16);
result += BinaryParser.fromByte(number);
}
return new ObjectID(result, hexString);
};
/**
* Checks if a value is a valid bson ObjectId
*
* @method
* @return {boolean} return true if the value is a valid bson ObjectId, return false otherwise.
*/
ObjectID.isValid = function isValid(id) {
if(id == null) return false;
if(id != null && 'number' != typeof id && (id.length != 12 && id.length != 24)) {
return false;
} else {
// Check specifically for hex correctness
if(typeof id == 'string' && id.length == 24) return checkForHexRegExp.test(id);
return true;
}
};
/**
* @ignore
*/
Object.defineProperty(ObjectID.prototype, "generationTime", {
enumerable: true
, get: function () {
return Math.floor(BinaryParser.decodeInt(this.id.substring(0,4), 32, true, true));
}
, set: function (value) {
value = BinaryParser.encodeInt(value, 32, true, true);
this.id = value + this.id.substr(4);
// delete this.__id;
this.toHexString();
}
});
/**
* Expose.
*/
module.exports = ObjectID;
module.exports.ObjectID = ObjectID;
module.exports.ObjectId = ObjectID;
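A matching sketch for ObjectID, assuming the file is saved as objectid.js with binary_parser.js alongside it (paths illustrative). Note the 12-byte layout produced by generate(): 4 bytes of seconds, 3 of machine id, 2 of pid, 3 of counter.

var ObjectID = require('./objectid');
var id = new ObjectID();                          // generates a fresh 12-byte id
console.log(id.toHexString());                    // 24 hex characters
console.log(ObjectID.isValid(id.toHexString())); // true
var same = ObjectID.createFromHexString(id.toHexString());
console.log(id.equals(same));                     // true
console.log(id.getTimestamp());                   // creation time, second resolution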


@@ -1 +0,0 @@
../node-pre-gyp/bin/node-pre-gyp.js


@@ -1,30 +0,0 @@
## DNT config file
## see https://github.com/rvagg/dnt
NODE_VERSIONS="\
master \
v0.11.13 \
v0.10.30 \
v0.10.29 \
v0.10.28 \
v0.10.26 \
v0.10.25 \
v0.10.24 \
v0.10.23 \
v0.10.22 \
v0.10.21 \
v0.10.20 \
v0.10.19 \
v0.8.28 \
v0.8.27 \
v0.8.26 \
v0.8.24 \
"
OUTPUT_PREFIX="nan-"
TEST_CMD=" \
cd /dnt/ && \
npm install && \
node_modules/.bin/node-gyp --nodedir /usr/src/node/ rebuild --directory test && \
node_modules/.bin/tap --gc test/js/*-test.js \
"


@@ -1,294 +0,0 @@
# NAN ChangeLog
**Version 1.8.4: current Node 12: 0.12.2, Node 10: 0.10.38, io.js: 1.8.1**
### 1.8.4 Apr 26 2015
- Build: Repackage
### 1.8.3 Apr 26 2015
- Bugfix: Include missing header 1af8648
### 1.8.2 Apr 23 2015
- Build: Repackage
### 1.8.1 Apr 23 2015
- Bugfix: NanObjectWrapHandle should take a pointer 155f1d3
### 1.8.0 Apr 23 2015
- Feature: Allow primitives with NanReturnValue 2e4475e
- Feature: Added comparison operators to NanCallback 55b075e
- Feature: Backport thread local storage 15bb7fa
- Removal: Remove support for signatures with arguments 8a2069d
- Correctness: Replaced NanObjectWrapHandle macro with function 0bc6d59
### 1.7.0 Feb 28 2015
- Feature: Made NanCallback::Call accept optional target 8d54da7
- Feature: Support atom-shell 0.21 0b7f1bb
### 1.6.2 Feb 6 2015
- Bugfix: NanEncode: fix argument type for node::Encode on io.js 2be8639
### 1.6.1 Jan 23 2015
- Build: version bump
### 1.5.3 Jan 23 2015
- Build: repackage
### 1.6.0 Jan 23 2015
- Deprecated `NanNewContextHandle` in favor of `NanNew<Context>` 49259af
- Support utility functions moved in newer v8 versions (Node 0.11.15, io.js 1.0) a0aa179
- Added `NanEncode`, `NanDecodeBytes` and `NanDecodeWrite` 75e6fb9
### 1.5.2 Jan 23 2015
- Bugfix: Fix non-inline definition build error with clang++ 21d96a1, 60fadd4
- Bugfix: Readded missing String constructors 18d828f
- Bugfix: Add overload handling NanNew<FunctionTemplate>(..) 5ef813b
- Bugfix: Fix uv_work_cb versioning 997e4ae
- Bugfix: Add function factory and test 4eca89c
- Bugfix: Add object template factory and test cdcb951
- Correctness: Lifted an io.js related typedef c9490be
- Correctness: Make explicit downcasts of String lengths 00074e6
- Windows: Limit the scope of disabled warning C4530 83d7deb
### 1.5.1 Jan 15 2015
- Build: version bump
### 1.4.3 Jan 15 2015
- Build: version bump
### 1.4.2 Jan 15 2015
- Feature: Support io.js 0dbc5e8
### 1.5.0 Jan 14 2015
- Feature: Support io.js b003843
- Correctness: Improved NanNew internals 9cd4f6a
- Feature: Implement progress to NanAsyncWorker 8d6a160
### 1.4.1 Nov 8 2014
- Bugfix: Handle DEBUG definition correctly
- Bugfix: Accept int as Boolean
### 1.4.0 Nov 1 2014
- Feature: Added NAN_GC_CALLBACK 6a5c245
- Performance: Removed unnecessary local handle creation 18a7243, 41fe2f8
- Correctness: Added constness to references in NanHasInstance 02c61cd
- Warnings: Fixed spurious warnings from -Wundef and -Wshadow, 541b122, 99d8cb6
- Windoze: Shut Visual Studio up when compiling 8d558c1
- License: Switch to plain MIT from custom hacked MIT license 11de983
- Build: Added test target to Makefile e232e46
- Performance: Removed superfluous scope in NanAsyncWorker f4b7821
- Sugar/Feature: Added NanReturnThis() and NanReturnHolder() shorthands 237a5ff, d697208
- Feature: Added suitable overload of NanNew for v8::Integer::NewFromUnsigned b27b450
### 1.3.0 Aug 2 2014
- Added NanNew<v8::String, std::string>(std::string)
- Added NanNew<v8::String, std::string&>(std::string&)
- Added NanAsciiString helper class
- Added NanUtf8String helper class
- Added NanUcs2String helper class
- Deprecated NanRawString()
- Deprecated NanCString()
- Added NanGetIsolateData(v8::Isolate *isolate)
- Added NanMakeCallback(v8::Handle<v8::Object> target, v8::Handle<v8::Function> func, int argc, v8::Handle<v8::Value>* argv)
- Added NanMakeCallback(v8::Handle<v8::Object> target, v8::Handle<v8::String> symbol, int argc, v8::Handle<v8::Value>* argv)
- Added NanMakeCallback(v8::Handle<v8::Object> target, const char* method, int argc, v8::Handle<v8::Value>* argv)
- Added NanSetTemplate(v8::Handle<v8::Template> templ, v8::Handle<v8::String> name , v8::Handle<v8::Data> value, v8::PropertyAttribute attributes)
- Added NanSetPrototypeTemplate(v8::Local<v8::FunctionTemplate> templ, v8::Handle<v8::String> name, v8::Handle<v8::Data> value, v8::PropertyAttribute attributes)
- Added NanSetInstanceTemplate(v8::Local<v8::FunctionTemplate> templ, const char *name, v8::Handle<v8::Data> value)
- Added NanSetInstanceTemplate(v8::Local<v8::FunctionTemplate> templ, v8::Handle<v8::String> name, v8::Handle<v8::Data> value, v8::PropertyAttribute attributes)
### 1.2.0 Jun 5 2014
- Add NanSetPrototypeTemplate
- Changed NAN_WEAK_CALLBACK internals, switched _NanWeakCallbackData to class,
introduced _NanWeakCallbackDispatcher
- Removed -Wno-unused-local-typedefs from test builds
- Made test builds Windows compatible ('Sleep()')
### 1.1.2 May 28 2014
- Release to fix more stuff-ups in 1.1.1
### 1.1.1 May 28 2014
- Release to fix version mismatch in nan.h and lack of changelog entry for 1.1.0
### 1.1.0 May 25 2014
- Remove nan_isolate, use v8::Isolate::GetCurrent() internally instead
- Additional explicit overloads for NanNew(): (char*,int), (uint8_t*[,int]),
(uint16_t*[,int]), double, int, unsigned int, bool, v8::String::ExternalStringResource*,
v8::String::ExternalAsciiStringResource*
- Deprecate NanSymbol()
- Added SetErrorMessage() and ErrorMessage() to NanAsyncWorker
### 1.0.0 May 4 2014
- Heavy API changes for V8 3.25 / Node 0.11.13
- Use cpplint.py
- Removed NanInitPersistent
- Removed NanPersistentToLocal
- Removed NanFromV8String
- Removed NanMakeWeak
- Removed NanNewLocal
- Removed NAN_WEAK_CALLBACK_OBJECT
- Removed NAN_WEAK_CALLBACK_DATA
- Introduce NanNew, replaces NanNewLocal, NanPersistentToLocal, adds many overloaded typed versions
- Introduce NanUndefined, NanNull, NanTrue and NanFalse
- Introduce NanEscapableScope and NanEscapeScope
- Introduce NanMakeWeakPersistent (requires a special callback to work on both old and new node)
- Introduce NanMakeCallback for node::MakeCallback
- Introduce NanSetTemplate
- Introduce NanGetCurrentContext
- Introduce NanCompileScript and NanRunScript
- Introduce NanAdjustExternalMemory
- Introduce NanAddGCEpilogueCallback, NanAddGCPrologueCallback, NanRemoveGCEpilogueCallback, NanRemoveGCPrologueCallback
- Introduce NanGetHeapStatistics
- Rename NanAsyncWorker#SavePersistent() to SaveToPersistent()
### 0.8.0 Jan 9 2014
- NanDispose -> NanDisposePersistent, deprecate NanDispose
- Extract _NAN_*_RETURN_TYPE, pull up NAN_*()
### 0.7.1 Jan 9 2014
- Fixes to work against debug builds of Node
- Safer NanPersistentToLocal (avoid reinterpret_cast)
- Speed up common NanRawString case by only extracting flattened string when necessary
### 0.7.0 Dec 17 2013
- New no-arg form of NanCallback() constructor.
- NanCallback#Call takes Handle rather than Local
- Removed deprecated NanCallback#Run method, use NanCallback#Call instead
- Split off _NAN_*_ARGS_TYPE from _NAN_*_ARGS
- Restore (unofficial) Node 0.6 compatibility at NanCallback#Call()
- Introduce NanRawString() for char* (or appropriate void*) from v8::String
(replacement for NanFromV8String)
- Introduce NanCString() for null-terminated char* from v8::String
### 0.6.0 Nov 21 2013
- Introduce NanNewLocal<T>(v8::Handle<T> value) for use in place of
v8::Local<T>::New(...) since v8 started requiring isolate in Node 0.11.9
### 0.5.2 Nov 16 2013
- Convert SavePersistent and GetFromPersistent in NanAsyncWorker from protected to public
### 0.5.1 Nov 12 2013
- Use node::MakeCallback() instead of direct v8::Function::Call()
### 0.5.0 Nov 11 2013
- Added @TooTallNate as collaborator
- New, much simpler, "include_dirs" for binding.gyp
- Added full range of NAN_INDEX_* macros to match NAN_PROPERTY_* macros
### 0.4.4 Nov 2 2013
- Isolate argument from v8::Persistent::MakeWeak removed for 0.11.8+
### 0.4.3 Nov 2 2013
- Include node_object_wrap.h, removed from node.h for Node 0.11.8.
### 0.4.2 Nov 2 2013
- Handle deprecation of v8::Persistent::Dispose(v8::Isolate* isolate)) for
Node 0.11.8 release.
### 0.4.1 Sep 16 2013
- Added explicit `#include <uv.h>` as it was removed from node.h for v0.11.8
### 0.4.0 Sep 2 2013
- Added NAN_INLINE and NAN_DEPRECATED and made use of them
- Added NanError, NanTypeError and NanRangeError
- Cleaned up code
### 0.3.2 Aug 30 2013
- Fix missing scope declaration in GetFromPersistent() and SaveToPersistent
in NanAsyncWorker
### 0.3.1 Aug 20 2013
- fix "not all control paths return a value" compile warning on some platforms
### 0.3.0 Aug 19 2013
- Made NAN work with NPM
- Lots of fixes to NanFromV8String, pulling in features from new Node core
- Changed node::encoding to Nan::Encoding in NanFromV8String to unify the API
- Added optional error number argument for NanThrowError()
- Added NanInitPersistent()
- Added NanReturnNull() and NanReturnEmptyString()
- Added NanLocker and NanUnlocker
- Added missing scopes
- Made sure to clear disposed Persistent handles
- Changed NanAsyncWorker to allocate error messages on the heap
- Changed NanThrowError(Local<Value>) to NanThrowError(Handle<Value>)
- Fixed leak in NanAsyncWorker when errmsg is used
### 0.2.2 Aug 5 2013
- Fixed usage of undefined variable with node::BASE64 in NanFromV8String()
### 0.2.1 Aug 5 2013
- Fixed 0.8 breakage, node::BUFFER encoding type not available in 0.8 for
NanFromV8String()
### 0.2.0 Aug 5 2013
- Added NAN_PROPERTY_GETTER, NAN_PROPERTY_SETTER, NAN_PROPERTY_ENUMERATOR,
NAN_PROPERTY_DELETER, NAN_PROPERTY_QUERY
- Extracted _NAN_METHOD_ARGS, _NAN_GETTER_ARGS, _NAN_SETTER_ARGS,
_NAN_PROPERTY_GETTER_ARGS, _NAN_PROPERTY_SETTER_ARGS,
_NAN_PROPERTY_ENUMERATOR_ARGS, _NAN_PROPERTY_DELETER_ARGS,
_NAN_PROPERTY_QUERY_ARGS
- Added NanGetInternalFieldPointer, NanSetInternalFieldPointer
- Added NAN_WEAK_CALLBACK, NAN_WEAK_CALLBACK_OBJECT,
NAN_WEAK_CALLBACK_DATA, NanMakeWeak
- Renamed THROW_ERROR to _NAN_THROW_ERROR
- Added NanNewBufferHandle(char*, size_t, node::smalloc::FreeCallback, void*)
- Added NanBufferUse(char*, uint32_t)
- Added NanNewContextHandle(v8::ExtensionConfiguration*,
v8::Handle<v8::ObjectTemplate>, v8::Handle<v8::Value>)
- Fixed broken NanCallback#GetFunction()
- Added optional encoding and size arguments to NanFromV8String()
- Added NanGetPointerSafe() and NanSetPointerSafe()
- Added initial test suite (to be expanded)
- Allow NanUInt32OptionValue to convert any Number object
### 0.1.0 Jul 21 2013
- Added `NAN_GETTER`, `NAN_SETTER`
- Added `NanThrowError` with single Local<Value> argument
- Added `NanNewBufferHandle` with single uint32_t argument
- Added `NanHasInstance(Persistent<FunctionTemplate>&, Handle<Value>)`
- Added `Local<Function> NanCallback#GetFunction()`
- Added `NanCallback#Call(int, Local<Value>[])`
- Deprecated `NanCallback#Run(int, Local<Value>[])` in favour of Call


@@ -1,13 +0,0 @@
The MIT License (MIT)
=====================
Copyright (c) 2015 NAN contributors
-----------------------------------
*NAN contributors listed at <https://github.com/rvagg/nan#contributors>*
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

File diff suppressed because it is too large


@@ -1,38 +0,0 @@
# http://www.appveyor.com/docs/appveyor-yml
# Test against these versions of io.js and Node.js.
environment:
matrix:
# node.js
- nodejs_version: "0.8"
- nodejs_version: "0.10"
- nodejs_version: "0.12"
# io.js
- nodejs_version: "1"
# Install scripts. (runs after repo cloning)
install:
# Get the latest stable version of Node 0.STABLE.latest
- ps: if($env:nodejs_version -eq "0.8") {Install-Product node $env:nodejs_version}
- ps: if($env:nodejs_version -ne "0.8") {Update-NodeJsInstallation (Get-NodeJsLatestBuild $env:nodejs_version)}
- IF NOT %nodejs_version% == 1 npm -g install npm
- IF NOT %nodejs_version% == 1 set PATH=%APPDATA%\npm;%PATH%
# Typical npm stuff.
- npm install
- IF %nodejs_version% == 0.8 node node_modules\node-gyp\bin\node-gyp.js rebuild --directory test
- IF NOT %nodejs_version% == 0.8 npm run rebuild-tests
# Post-install test scripts.
test_script:
# Output useful info for debugging.
- node --version
- npm --version
# run tests
- IF NOT %nodejs_version% == 1 npm test
- IF %nodejs_version% == 1 iojs node_modules\tap\bin\tap.js --gc test\js\*-test.js
# Don't actually build.
build: off
# Set build version format here instead of in the admin panel.
version: "{build}"


@@ -1 +0,0 @@
console.log(require('path').relative('.', __dirname));

File diff suppressed because it is too large


@@ -1,261 +0,0 @@
/*********************************************************************
* NAN - Native Abstractions for Node.js
*
* Copyright (c) 2015 NAN contributors
*
* MIT License <https://github.com/rvagg/nan/blob/master/LICENSE.md>
********************************************************************/
#ifndef NAN_IMPLEMENTATION_12_INL_H_
#define NAN_IMPLEMENTATION_12_INL_H_
//==============================================================================
// node v0.11 implementation
//==============================================================================
#if defined(_MSC_VER)
# pragma warning( push )
# pragma warning( disable : 4530 )
# include <string>
# pragma warning( pop )
#else
# include <string>
#endif
namespace NanIntern {
//=== Array ====================================================================
Factory<v8::Array>::return_t
Factory<v8::Array>::New() {
return v8::Array::New(v8::Isolate::GetCurrent());
}
Factory<v8::Array>::return_t
Factory<v8::Array>::New(int length) {
return v8::Array::New(v8::Isolate::GetCurrent(), length);
}
//=== Boolean ==================================================================
Factory<v8::Boolean>::return_t
Factory<v8::Boolean>::New(bool value) {
return v8::Boolean::New(v8::Isolate::GetCurrent(), value);
}
//=== Boolean Object ===========================================================
Factory<v8::BooleanObject>::return_t
Factory<v8::BooleanObject>::New(bool value) {
return v8::BooleanObject::New(value).As<v8::BooleanObject>();
}
//=== Context ==================================================================
Factory<v8::Context>::return_t
Factory<v8::Context>::New( v8::ExtensionConfiguration* extensions
, v8::Handle<v8::ObjectTemplate> tmpl
, v8::Handle<v8::Value> obj) {
return v8::Context::New(v8::Isolate::GetCurrent(), extensions, tmpl, obj);
}
//=== Date =====================================================================
Factory<v8::Date>::return_t
Factory<v8::Date>::New(double value) {
return v8::Date::New(v8::Isolate::GetCurrent(), value).As<v8::Date>();
}
//=== External =================================================================
Factory<v8::External>::return_t
Factory<v8::External>::New(void * value) {
return v8::External::New(v8::Isolate::GetCurrent(), value);
}
//=== Function =================================================================
Factory<v8::Function>::return_t
Factory<v8::Function>::New( NanFunctionCallback callback
, v8::Handle<v8::Value> data) {
return v8::Function::New( v8::Isolate::GetCurrent()
, callback
, data);
}
//=== Function Template ========================================================
Factory<v8::FunctionTemplate>::return_t
Factory<v8::FunctionTemplate>::New( NanFunctionCallback callback
, v8::Handle<v8::Value> data
, v8::Handle<v8::Signature> signature) {
return v8::FunctionTemplate::New( v8::Isolate::GetCurrent()
, callback
, data
, signature);
}
//=== Number ===================================================================
Factory<v8::Number>::return_t
Factory<v8::Number>::New(double value) {
return v8::Number::New(v8::Isolate::GetCurrent(), value);
}
//=== Number Object ============================================================
Factory<v8::NumberObject>::return_t
Factory<v8::NumberObject>::New(double value) {
return v8::NumberObject::New( v8::Isolate::GetCurrent()
, value).As<v8::NumberObject>();
}
//=== Integer, Int32 and Uint32 ================================================
template <typename T>
typename IntegerFactory<T>::return_t
IntegerFactory<T>::New(int32_t value) {
return To<T>(T::New(v8::Isolate::GetCurrent(), value));
}
template <typename T>
typename IntegerFactory<T>::return_t
IntegerFactory<T>::New(uint32_t value) {
return To<T>(T::NewFromUnsigned(v8::Isolate::GetCurrent(), value));
}
Factory<v8::Uint32>::return_t
Factory<v8::Uint32>::New(int32_t value) {
return To<v8::Uint32>(
v8::Uint32::NewFromUnsigned(v8::Isolate::GetCurrent(), value));
}
Factory<v8::Uint32>::return_t
Factory<v8::Uint32>::New(uint32_t value) {
return To<v8::Uint32>(
v8::Uint32::NewFromUnsigned(v8::Isolate::GetCurrent(), value));
}
//=== Object ===================================================================
Factory<v8::Object>::return_t
Factory<v8::Object>::New() {
return v8::Object::New(v8::Isolate::GetCurrent());
}
//=== Object Template ==========================================================
Factory<v8::ObjectTemplate>::return_t
Factory<v8::ObjectTemplate>::New() {
return v8::ObjectTemplate::New(v8::Isolate::GetCurrent());
}
//=== RegExp ===================================================================
Factory<v8::RegExp>::return_t
Factory<v8::RegExp>::New(
v8::Handle<v8::String> pattern
, v8::RegExp::Flags flags) {
return v8::RegExp::New(pattern, flags);
}
//=== Script ===================================================================
Factory<v8::Script>::return_t
Factory<v8::Script>::New( v8::Local<v8::String> source) {
v8::ScriptCompiler::Source src(source);
return v8::ScriptCompiler::Compile(v8::Isolate::GetCurrent(), &src);
}
Factory<v8::Script>::return_t
Factory<v8::Script>::New( v8::Local<v8::String> source
, v8::ScriptOrigin const& origin) {
v8::ScriptCompiler::Source src(source, origin);
return v8::ScriptCompiler::Compile(v8::Isolate::GetCurrent(), &src);
}
//=== Signature ================================================================
Factory<v8::Signature>::return_t
Factory<v8::Signature>::New(Factory<v8::Signature>::FTH receiver) {
return v8::Signature::New(v8::Isolate::GetCurrent(), receiver);
}
//=== String ===================================================================
Factory<v8::String>::return_t
Factory<v8::String>::New() {
return v8::String::Empty(v8::Isolate::GetCurrent());
}
Factory<v8::String>::return_t
Factory<v8::String>::New(const char * value, int length) {
return v8::String::NewFromUtf8(v8::Isolate::GetCurrent(), value,
v8::String::kNormalString, length);
}
Factory<v8::String>::return_t
Factory<v8::String>::New(std::string const& value) {
assert(value.size() <= INT_MAX && "string too long");
return v8::String::NewFromUtf8(v8::Isolate::GetCurrent(),
value.data(), v8::String::kNormalString, static_cast<int>(value.size()));
}
Factory<v8::String>::return_t
Factory<v8::String>::New(const uint8_t * value, int length) {
return v8::String::NewFromOneByte(v8::Isolate::GetCurrent(), value,
v8::String::kNormalString, length);
}
Factory<v8::String>::return_t
Factory<v8::String>::New(const uint16_t * value, int length) {
return v8::String::NewFromTwoByte(v8::Isolate::GetCurrent(), value,
v8::String::kNormalString, length);
}
Factory<v8::String>::return_t
Factory<v8::String>::New(v8::String::ExternalStringResource * value) {
return v8::String::NewExternal(v8::Isolate::GetCurrent(), value);
}
Factory<v8::String>::return_t
Factory<v8::String>::New(NanExternalOneByteStringResource * value) {
return v8::String::NewExternal(v8::Isolate::GetCurrent(), value);
}
//=== String Object ============================================================
Factory<v8::StringObject>::return_t
Factory<v8::StringObject>::New(v8::Handle<v8::String> value) {
return v8::StringObject::New(value).As<v8::StringObject>();
}
//=== Unbound Script ===========================================================
Factory<v8::UnboundScript>::return_t
Factory<v8::UnboundScript>::New(v8::Local<v8::String> source) {
v8::ScriptCompiler::Source src(source);
return v8::ScriptCompiler::CompileUnbound(v8::Isolate::GetCurrent(), &src);
}
Factory<v8::UnboundScript>::return_t
Factory<v8::UnboundScript>::New( v8::Local<v8::String> source
, v8::ScriptOrigin const& origin) {
v8::ScriptCompiler::Source src(source, origin);
return v8::ScriptCompiler::CompileUnbound(v8::Isolate::GetCurrent(), &src);
}
} // end of namespace NanIntern
//=== Persistents and Handles =================================================
template <typename T>
inline v8::Local<T> NanNew(v8::Handle<T> h) {
return v8::Local<T>::New(v8::Isolate::GetCurrent(), h);
}
template <typename T>
inline v8::Local<T> NanNew(v8::Persistent<T> const& p) {
return v8::Local<T>::New(v8::Isolate::GetCurrent(), p);
}
#endif // NAN_IMPLEMENTATION_12_INL_H_


@@ -1,267 +0,0 @@
/*********************************************************************
* NAN - Native Abstractions for Node.js
*
* Copyright (c) 2015 NAN contributors
*
* MIT License <https://github.com/rvagg/nan/blob/master/LICENSE.md>
********************************************************************/
#ifndef NAN_IMPLEMENTATION_PRE_12_INL_H_
#define NAN_IMPLEMENTATION_PRE_12_INL_H_
#include <algorithm>
#if defined(_MSC_VER)
# pragma warning( push )
# pragma warning( disable : 4530 )
# include <string>
# include <vector>
# pragma warning( pop )
#else
# include <string>
# include <vector>
#endif
//==============================================================================
// node v0.10 implementation
//==============================================================================
namespace NanIntern {
//=== Array ====================================================================
Factory<v8::Array>::return_t
Factory<v8::Array>::New() {
return v8::Array::New();
}
Factory<v8::Array>::return_t
Factory<v8::Array>::New(int length) {
return v8::Array::New(length);
}
//=== Boolean ==================================================================
Factory<v8::Boolean>::return_t
Factory<v8::Boolean>::New(bool value) {
return v8::Boolean::New(value)->ToBoolean();
}
//=== Boolean Object ===========================================================
Factory<v8::BooleanObject>::return_t
Factory<v8::BooleanObject>::New(bool value) {
return v8::BooleanObject::New(value).As<v8::BooleanObject>();
}
//=== Context ==================================================================
Factory<v8::Context>::return_t
Factory<v8::Context>::New( v8::ExtensionConfiguration* extensions
, v8::Handle<v8::ObjectTemplate> tmpl
, v8::Handle<v8::Value> obj) {
v8::Persistent<v8::Context> ctx = v8::Context::New(extensions, tmpl, obj);
v8::Local<v8::Context> lctx = v8::Local<v8::Context>::New(ctx);
ctx.Dispose();
return lctx;
}
//=== Date =====================================================================
Factory<v8::Date>::return_t
Factory<v8::Date>::New(double value) {
return v8::Date::New(value).As<v8::Date>();
}
//=== External =================================================================
Factory<v8::External>::return_t
Factory<v8::External>::New(void * value) {
return v8::External::New(value);
}
//=== Function =================================================================
Factory<v8::Function>::return_t
Factory<v8::Function>::New( NanFunctionCallback callback
, v8::Handle<v8::Value> data) {
return Factory<v8::FunctionTemplate>::New( callback
, data
, v8::Handle<v8::Signature>()
)->GetFunction();
}
//=== FunctionTemplate =========================================================
Factory<v8::FunctionTemplate>::return_t
Factory<v8::FunctionTemplate>::New( NanFunctionCallback callback
, v8::Handle<v8::Value> data
, v8::Handle<v8::Signature> signature) {
// Note(agnat): Emulate length argument here. Unfortunately, I couldn't find
// a way. Have at it though...
return v8::FunctionTemplate::New( callback
, data
, signature);
}
//=== Number ===================================================================
Factory<v8::Number>::return_t
Factory<v8::Number>::New(double value) {
return v8::Number::New(value);
}
//=== Number Object ============================================================
Factory<v8::NumberObject>::return_t
Factory<v8::NumberObject>::New(double value) {
return v8::NumberObject::New(value).As<v8::NumberObject>();
}
//=== Integer, Int32 and Uint32 ================================================
template <typename T>
typename IntegerFactory<T>::return_t
IntegerFactory<T>::New(int32_t value) {
return To<T>(T::New(value));
}
template <typename T>
typename IntegerFactory<T>::return_t
IntegerFactory<T>::New(uint32_t value) {
return To<T>(T::NewFromUnsigned(value));
}
Factory<v8::Uint32>::return_t
Factory<v8::Uint32>::New(int32_t value) {
return To<v8::Uint32>(v8::Uint32::NewFromUnsigned(value));
}
Factory<v8::Uint32>::return_t
Factory<v8::Uint32>::New(uint32_t value) {
return To<v8::Uint32>(v8::Uint32::NewFromUnsigned(value));
}
//=== Object ===================================================================
Factory<v8::Object>::return_t
Factory<v8::Object>::New() {
return v8::Object::New();
}
//=== Object Template ==========================================================
Factory<v8::ObjectTemplate>::return_t
Factory<v8::ObjectTemplate>::New() {
return v8::ObjectTemplate::New();
}
//=== RegExp ===================================================================
Factory<v8::RegExp>::return_t
Factory<v8::RegExp>::New(
v8::Handle<v8::String> pattern
, v8::RegExp::Flags flags) {
return v8::RegExp::New(pattern, flags);
}
//=== Script ===================================================================
Factory<v8::Script>::return_t
Factory<v8::Script>::New( v8::Local<v8::String> source) {
return v8::Script::New(source);
}
Factory<v8::Script>::return_t
Factory<v8::Script>::New( v8::Local<v8::String> source
, v8::ScriptOrigin const& origin) {
return v8::Script::New(source, const_cast<v8::ScriptOrigin*>(&origin));
}
//=== Signature ================================================================
Factory<v8::Signature>::return_t
Factory<v8::Signature>::New(Factory<v8::Signature>::FTH receiver) {
return v8::Signature::New(receiver);
}
//=== String ===================================================================
Factory<v8::String>::return_t
Factory<v8::String>::New() {
return v8::String::Empty();
}
Factory<v8::String>::return_t
Factory<v8::String>::New(const char * value, int length) {
return v8::String::New(value, length);
}
Factory<v8::String>::return_t
Factory<v8::String>::New(std::string const& value) {
assert(value.size() <= INT_MAX && "string too long");
return v8::String::New( value.data(), static_cast<int>(value.size()));
}
inline
void
widenString(std::vector<uint16_t> *ws, const uint8_t *s, int l = -1) {
size_t len = static_cast<size_t>(l);
if (l < 0) {
len = strlen(reinterpret_cast<const char*>(s));
}
assert(len <= INT_MAX && "string too long");
ws->resize(len);
std::copy(s, s + len, ws->begin());
}
Factory<v8::String>::return_t
Factory<v8::String>::New(const uint16_t * value, int length) {
return v8::String::New(value, length);
}
Factory<v8::String>::return_t
Factory<v8::String>::New(const uint8_t * value, int length) {
std::vector<uint16_t> wideString;
widenString(&wideString, value, length);
if (wideString.size() == 0) {
return v8::String::Empty();
} else {
return v8::String::New(&wideString.front()
, static_cast<int>(wideString.size()));
}
}
Factory<v8::String>::return_t
Factory<v8::String>::New(v8::String::ExternalStringResource * value) {
return v8::String::NewExternal(value);
}
Factory<v8::String>::return_t
Factory<v8::String>::New(v8::String::ExternalAsciiStringResource * value) {
return v8::String::NewExternal(value);
}
//=== String Object ============================================================
Factory<v8::StringObject>::return_t
Factory<v8::StringObject>::New(v8::Handle<v8::String> value) {
return v8::StringObject::New(value).As<v8::StringObject>();
}
} // end of namespace NanIntern
//=== Persistents and Handles =================================================
template <typename T>
inline v8::Local<T> NanNew(v8::Handle<T> h) {
return v8::Local<T>::New(h);
}
template <typename T>
inline v8::Local<T> NanNew(v8::Persistent<T> const& p) {
return v8::Local<T>::New(p);
}
#endif // NAN_IMPLEMENTATION_PRE_12_INL_H_


@@ -1,328 +0,0 @@
/*********************************************************************
* NAN - Native Abstractions for Node.js
*
* Copyright (c) 2015 NAN contributors
*
* MIT License <https://github.com/rvagg/nan/blob/master/LICENSE.md>
********************************************************************/
#ifndef NAN_NEW_H_
#define NAN_NEW_H_
#if defined(_MSC_VER)
# pragma warning( push )
# pragma warning( disable : 4530 )
# include <string>
# pragma warning( pop )
#else
# include <string>
#endif
namespace NanIntern { // scnr
// TODO(agnat): Generalize
template <typename T> v8::Local<T> To(v8::Handle<v8::Integer> i);
template <>
inline
v8::Local<v8::Integer>
To<v8::Integer>(v8::Handle<v8::Integer> i) { return i->ToInteger(); }
template <>
inline
v8::Local<v8::Int32>
To<v8::Int32>(v8::Handle<v8::Integer> i) { return i->ToInt32(); }
template <>
inline
v8::Local<v8::Uint32>
To<v8::Uint32>(v8::Handle<v8::Integer> i) { return i->ToUint32(); }
template <typename T> struct FactoryBase { typedef v8::Local<T> return_t; };
template <typename T> struct Factory;
template <>
struct Factory<v8::Array> : FactoryBase<v8::Array> {
static inline return_t New();
static inline return_t New(int length);
};
template <>
struct Factory<v8::Boolean> : FactoryBase<v8::Boolean> {
static inline return_t New(bool value);
};
template <>
struct Factory<v8::BooleanObject> : FactoryBase<v8::BooleanObject> {
static inline return_t New(bool value);
};
template <>
struct Factory<v8::Context> : FactoryBase<v8::Context> {
static inline
return_t
New( v8::ExtensionConfiguration* extensions = NULL
, v8::Handle<v8::ObjectTemplate> tmpl = v8::Handle<v8::ObjectTemplate>()
, v8::Handle<v8::Value> obj = v8::Handle<v8::Value>());
};
template <>
struct Factory<v8::Date> : FactoryBase<v8::Date> {
static inline return_t New(double value);
};
template <>
struct Factory<v8::External> : FactoryBase<v8::External> {
static inline return_t New(void *value);
};
template <>
struct Factory<v8::Function> : FactoryBase<v8::Function> {
static inline
return_t
New( NanFunctionCallback callback
, v8::Handle<v8::Value> data = v8::Handle<v8::Value>());
};
template <>
struct Factory<v8::FunctionTemplate> : FactoryBase<v8::FunctionTemplate> {
static inline
return_t
New( NanFunctionCallback callback = NULL
, v8::Handle<v8::Value> data = v8::Handle<v8::Value>()
, v8::Handle<v8::Signature> signature = v8::Handle<v8::Signature>());
};
template <>
struct Factory<v8::Number> : FactoryBase<v8::Number> {
static inline return_t New(double value);
};
template <>
struct Factory<v8::NumberObject> : FactoryBase<v8::NumberObject> {
static inline return_t New(double value);
};
template <typename T>
struct IntegerFactory : FactoryBase<T> {
typedef typename FactoryBase<T>::return_t return_t;
static inline return_t New(int32_t value);
static inline return_t New(uint32_t value);
};
template <>
struct Factory<v8::Integer> : IntegerFactory<v8::Integer> {};
template <>
struct Factory<v8::Int32> : IntegerFactory<v8::Int32> {};
template <>
struct Factory<v8::Uint32> : FactoryBase<v8::Uint32> {
static inline return_t New(int32_t value);
static inline return_t New(uint32_t value);
};
template <>
struct Factory<v8::Object> : FactoryBase<v8::Object> {
static inline return_t New();
};
template <>
struct Factory<v8::ObjectTemplate> : FactoryBase<v8::ObjectTemplate> {
static inline return_t New();
};
template <>
struct Factory<v8::RegExp> : FactoryBase<v8::RegExp> {
static inline return_t New(
v8::Handle<v8::String> pattern, v8::RegExp::Flags flags);
};
template <>
struct Factory<v8::Script> : FactoryBase<v8::Script> {
static inline return_t New( v8::Local<v8::String> source);
static inline return_t New( v8::Local<v8::String> source
, v8::ScriptOrigin const& origin);
};
template <>
struct Factory<v8::Signature> : FactoryBase<v8::Signature> {
typedef v8::Handle<v8::FunctionTemplate> FTH;
static inline return_t New(FTH receiver = FTH());
};
template <>
struct Factory<v8::String> : FactoryBase<v8::String> {
static inline return_t New();
static inline return_t New(const char *value, int length = -1);
static inline return_t New(const uint16_t *value, int length = -1);
static inline return_t New(std::string const& value);
static inline return_t New(v8::String::ExternalStringResource * value);
static inline return_t New(NanExternalOneByteStringResource * value);
// TODO(agnat): Deprecate.
static inline return_t New(const uint8_t * value, int length = -1);
};
template <>
struct Factory<v8::StringObject> : FactoryBase<v8::StringObject> {
static inline return_t New(v8::Handle<v8::String> value);
};
} // end of namespace NanIntern
#if (NODE_MODULE_VERSION >= 12)
namespace NanIntern {
template <>
struct Factory<v8::UnboundScript> : FactoryBase<v8::UnboundScript> {
static inline return_t New( v8::Local<v8::String> source);
static inline return_t New( v8::Local<v8::String> source
, v8::ScriptOrigin const& origin);
};
} // end of namespace NanIntern
# include "nan_implementation_12_inl.h"
#else // NODE_MODULE_VERSION >= 12
# include "nan_implementation_pre_12_inl.h"
#endif
//=== API ======================================================================
template <typename T>
typename NanIntern::Factory<T>::return_t
NanNew() {
return NanIntern::Factory<T>::New();
}
template <typename T, typename A0>
typename NanIntern::Factory<T>::return_t
NanNew(A0 arg0) {
return NanIntern::Factory<T>::New(arg0);
}
template <typename T, typename A0, typename A1>
typename NanIntern::Factory<T>::return_t
NanNew(A0 arg0, A1 arg1) {
return NanIntern::Factory<T>::New(arg0, arg1);
}
template <typename T, typename A0, typename A1, typename A2>
typename NanIntern::Factory<T>::return_t
NanNew(A0 arg0, A1 arg1, A2 arg2) {
return NanIntern::Factory<T>::New(arg0, arg1, arg2);
}
template <typename T, typename A0, typename A1, typename A2, typename A3>
typename NanIntern::Factory<T>::return_t
NanNew(A0 arg0, A1 arg1, A2 arg2, A3 arg3) {
return NanIntern::Factory<T>::New(arg0, arg1, arg2, arg3);
}
// Note(agnat): When passing overloaded function pointers to template functions
// as generic arguments the compiler needs help in picking the right overload.
// These two functions handle NanNew<Function> and NanNew<FunctionTemplate> with
// all argument variations.
// v8::Function and v8::FunctionTemplate with one or two arguments
template <typename T>
typename NanIntern::Factory<T>::return_t
NanNew( NanFunctionCallback callback
, v8::Handle<v8::Value> data = v8::Handle<v8::Value>()) {
return NanIntern::Factory<T>::New(callback, data);
}
// v8::Function and v8::FunctionTemplate with three arguments
template <typename T, typename A2>
typename NanIntern::Factory<T>::return_t
NanNew( NanFunctionCallback callback
, v8::Handle<v8::Value> data = v8::Handle<v8::Value>()
, A2 a2 = A2()) {
return NanIntern::Factory<T>::New(callback, data, a2);
}
// Convenience
template <typename T> inline v8::Local<T> NanNew(v8::Handle<T> h);
template <typename T> inline v8::Local<T> NanNew(v8::Persistent<T> const& p);
inline
NanIntern::Factory<v8::Boolean>::return_t
NanNew(bool value) {
return NanNew<v8::Boolean>(value);
}
inline
NanIntern::Factory<v8::Int32>::return_t
NanNew(int32_t value) {
return NanNew<v8::Int32>(value);
}
inline
NanIntern::Factory<v8::Uint32>::return_t
NanNew(uint32_t value) {
return NanNew<v8::Uint32>(value);
}
inline
NanIntern::Factory<v8::Number>::return_t
NanNew(double value) {
return NanNew<v8::Number>(value);
}
inline
NanIntern::Factory<v8::String>::return_t
NanNew(std::string const& value) {
return NanNew<v8::String>(value);
}
inline
NanIntern::Factory<v8::String>::return_t
NanNew(const char * value, int length) {
return NanNew<v8::String>(value, length);
}
inline
NanIntern::Factory<v8::String>::return_t
NanNew(const char * value) {
return NanNew<v8::String>(value);
}
inline
NanIntern::Factory<v8::String>::return_t
NanNew(const uint8_t * value) {
return NanNew<v8::String>(value);
}
inline
NanIntern::Factory<v8::String>::return_t
NanNew(const uint16_t * value) {
return NanNew<v8::String>(value);
}
inline
NanIntern::Factory<v8::String>::return_t
NanNew(v8::String::ExternalStringResource * value) {
return NanNew<v8::String>(value);
}
inline
NanIntern::Factory<v8::String>::return_t
NanNew(NanExternalOneByteStringResource * value) {
return NanNew<v8::String>(value);
}
inline
NanIntern::Factory<v8::RegExp>::return_t
NanNew(v8::Handle<v8::String> pattern, v8::RegExp::Flags flags) {
return NanNew<v8::RegExp>(pattern, flags);
}
#endif // NAN_NEW_H_

View File

@@ -1,312 +0,0 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
#ifndef NAN_STRING_BYTES_H_
#define NAN_STRING_BYTES_H_
// Decodes a v8::Handle<v8::String> or Buffer to a raw char*
#include <node.h>
#include <node_buffer.h>
#include <assert.h>
#include <string.h> // memcpy
#include <limits.h>
namespace NanIntern {
using v8::Local;
using v8::Handle;
using v8::Object;
using v8::String;
using v8::Value;
//// Base 64 ////
#define base64_encoded_size(size) ((size + 2 - ((size + 2) % 3)) / 3 * 4)
//// Nan::HEX ////
static bool contains_non_ascii_slow(const char* buf, size_t len) {
for (size_t i = 0; i < len; ++i) {
if (buf[i] & 0x80) return true;
}
return false;
}
static bool contains_non_ascii(const char* src, size_t len) {
if (len < 16) {
return contains_non_ascii_slow(src, len);
}
const unsigned bytes_per_word = sizeof(void*);
const unsigned align_mask = bytes_per_word - 1;
const unsigned unaligned = reinterpret_cast<uintptr_t>(src) & align_mask;
if (unaligned > 0) {
const unsigned n = bytes_per_word - unaligned;
if (contains_non_ascii_slow(src, n)) return true;
src += n;
len -= n;
}
#if defined(__x86_64__) || defined(_WIN64)
const uintptr_t mask = 0x8080808080808080ll;
#else
const uintptr_t mask = 0x80808080l;
#endif
const uintptr_t* srcw = reinterpret_cast<const uintptr_t*>(src);
for (size_t i = 0, n = len / bytes_per_word; i < n; ++i) {
if (srcw[i] & mask) return true;
}
const unsigned remainder = len & align_mask;
if (remainder > 0) {
const size_t offset = len - remainder;
if (contains_non_ascii_slow(src + offset, remainder)) return true;
}
return false;
}
static void force_ascii_slow(const char* src, char* dst, size_t len) {
for (size_t i = 0; i < len; ++i) {
dst[i] = src[i] & 0x7f;
}
}
static void force_ascii(const char* src, char* dst, size_t len) {
if (len < 16) {
force_ascii_slow(src, dst, len);
return;
}
const unsigned bytes_per_word = sizeof(void*);
const unsigned align_mask = bytes_per_word - 1;
const unsigned src_unalign = reinterpret_cast<uintptr_t>(src) & align_mask;
const unsigned dst_unalign = reinterpret_cast<uintptr_t>(dst) & align_mask;
if (src_unalign > 0) {
if (src_unalign == dst_unalign) {
const unsigned unalign = bytes_per_word - src_unalign;
force_ascii_slow(src, dst, unalign);
src += unalign;
dst += unalign;
len -= src_unalign;
} else {
force_ascii_slow(src, dst, len);
return;
}
}
#if defined(__x86_64__) || defined(_WIN64)
const uintptr_t mask = ~0x8080808080808080ll;
#else
const uintptr_t mask = ~0x80808080l;
#endif
const uintptr_t* srcw = reinterpret_cast<const uintptr_t*>(src);
uintptr_t* dstw = reinterpret_cast<uintptr_t*>(dst);
for (size_t i = 0, n = len / bytes_per_word; i < n; ++i) {
dstw[i] = srcw[i] & mask;
}
const unsigned remainder = len & align_mask;
if (remainder > 0) {
const size_t offset = len - remainder;
force_ascii_slow(src + offset, dst + offset, remainder);
}
}
static size_t base64_encode(const char* src,
size_t slen,
char* dst,
size_t dlen) {
// We know how much we'll write, just make sure that there's space.
assert(dlen >= base64_encoded_size(slen) &&
"not enough space provided for base64 encode");
dlen = base64_encoded_size(slen);
unsigned a;
unsigned b;
unsigned c;
unsigned i;
unsigned k;
unsigned n;
static const char table[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"abcdefghijklmnopqrstuvwxyz"
"0123456789+/";
i = 0;
k = 0;
n = slen / 3 * 3;
while (i < n) {
a = src[i + 0] & 0xff;
b = src[i + 1] & 0xff;
c = src[i + 2] & 0xff;
dst[k + 0] = table[a >> 2];
dst[k + 1] = table[((a & 3) << 4) | (b >> 4)];
dst[k + 2] = table[((b & 0x0f) << 2) | (c >> 6)];
dst[k + 3] = table[c & 0x3f];
i += 3;
k += 4;
}
if (n != slen) {
switch (slen - n) {
case 1:
a = src[i + 0] & 0xff;
dst[k + 0] = table[a >> 2];
dst[k + 1] = table[(a & 3) << 4];
dst[k + 2] = '=';
dst[k + 3] = '=';
break;
case 2:
a = src[i + 0] & 0xff;
b = src[i + 1] & 0xff;
dst[k + 0] = table[a >> 2];
dst[k + 1] = table[((a & 3) << 4) | (b >> 4)];
dst[k + 2] = table[(b & 0x0f) << 2];
dst[k + 3] = '=';
break;
}
}
return dlen;
}
static size_t hex_encode(const char* src, size_t slen, char* dst, size_t dlen) {
// We know how much we'll write, just make sure that there's space.
assert(dlen >= slen * 2 &&
"not enough space provided for hex encode");
dlen = slen * 2;
for (uint32_t i = 0, k = 0; k < dlen; i += 1, k += 2) {
static const char hex[] = "0123456789abcdef";
uint8_t val = static_cast<uint8_t>(src[i]);
dst[k + 0] = hex[val >> 4];
dst[k + 1] = hex[val & 15];
}
return dlen;
}
static Local<Value> Encode(const char* buf,
size_t buflen,
enum Nan::Encoding encoding) {
assert(buflen <= node::Buffer::kMaxLength);
if (!buflen && encoding != Nan::BUFFER)
return NanNew("");
Local<String> val;
switch (encoding) {
case Nan::BUFFER:
return NanNewBufferHandle(buf, buflen);
case Nan::ASCII:
if (contains_non_ascii(buf, buflen)) {
char* out = new char[buflen];
force_ascii(buf, out, buflen);
val = NanNew<String>(out, buflen);
delete[] out;
} else {
val = NanNew<String>(buf, buflen);
}
break;
case Nan::UTF8:
val = NanNew<String>(buf, buflen);
break;
case Nan::BINARY: {
// TODO(isaacs) use ExternalTwoByteString?
const unsigned char *cbuf = reinterpret_cast<const unsigned char*>(buf);
uint16_t * twobytebuf = new uint16_t[buflen];
for (size_t i = 0; i < buflen; i++) {
// XXX is the following line platform independent?
twobytebuf[i] = cbuf[i];
}
val = NanNew<String>(twobytebuf, buflen);
delete[] twobytebuf;
break;
}
case Nan::BASE64: {
size_t dlen = base64_encoded_size(buflen);
char* dst = new char[dlen];
size_t written = base64_encode(buf, buflen, dst, dlen);
assert(written == dlen);
val = NanNew<String>(dst, dlen);
delete[] dst;
break;
}
case Nan::UCS2: {
const uint16_t* data = reinterpret_cast<const uint16_t*>(buf);
val = NanNew<String>(data, buflen / 2);
break;
}
case Nan::HEX: {
size_t dlen = buflen * 2;
char* dst = new char[dlen];
size_t written = hex_encode(buf, buflen, dst, dlen);
assert(written == dlen);
val = NanNew<String>(dst, dlen);
delete[] dst;
break;
}
default:
assert(0 && "unknown encoding");
break;
}
return val;
}
#undef base64_encoded_size
} // namespace NanIntern
#endif // NAN_STRING_BYTES_H_

File diff suppressed because one or more lines are too long

View File

@@ -1,18 +0,0 @@
.gitignore
.npmignore
.DS_Store
nwsnapshot
node-webkit*.zip
credits.html
node-webkit.app
npm-debug.log
node_modules
test/
test.sh
test.bat
test-node-webkit.sh
test-custom-target.sh
scripts
.travis.yml
TODO.md
*.tgz

View File

@@ -1,41 +0,0 @@
language: cpp
# http://docs.travis-ci.com/user/multi-os/
os:
- linux
- osx
env:
matrix:
- NODE_VERSION="0.10.36"
- NODE_VERSION="0.12.0"
- NODE_VERSION="iojs-v1.2.0"
global:
- secure: nxPEbegqL+ckZ03BvGJSOlfcNuoJvj+rkLbBmmOEuTrk79lvY0+gjpmvWu4gGXMt89Vz+iAJB29ERaUdriKzlmYmebhWEdwQ/aayUv2sNA0eduvr4TALW2iLfLqryeE4449xnuEvz469AVWxO8xoX9KgmrwTLnkMR9SbQHxB6jU=
- secure: Fs0ilCVBL0DUMkd5vzLGL/5K364kLj1LpSKzwZpPvlU6Gx2jW+Zt59RfM1yOTCvrPWWNb+jc8jz8lbbPXAgUnYNmL54WkIjapr8yPIClw6AtNOSuPtL+zGVGT7FZEO7EntNuEtTer6Wj8IuWUT7myeAezKzNogYg7HyaO4JsoJc=
- secure: ovgkAH1h18pGmoYWN/2tWLF1lqaAK74eK1vnNFB5FMq3wTZYXpzM1W9nT3uqaDsNBUbs/groMcCDU/WBvfQtz9DcocVkuSCVgQ+6MaVVIuH7z7erfIe2sNdq0yhjT4KgBjTZcu6ccVSLjNNQwWctOiPuVgMOLacpN1PuPfs7KCk=
before_install:
- echo $NODE_VERSION
# upgrade nvm
- rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm
- source ~/.nvm/nvm.sh
- nvm install $NODE_VERSION
- nvm use $NODE_VERSION
- node --version
- npm --version
- npm update -g npm
- npm --version
install:
- npm install
before_script:
- npm test
# test node-webkit usage
- ./scripts/test-node-webkit.sh
script:
# test non-authenticated mode
- unset node_pre_gyp_accessKeyId
- npm test

View File

@@ -1,210 +0,0 @@
# node-pre-gyp changelog
## 0.6.4
- Improved support for `io.js` (@fengmk2)
- Test coverage improvements (@mikemorris)
- Fixed support for `--dist-url` that regressed in 0.6.3
## 0.6.3
- Added support for passing raw options to node-gyp using the `--` separator. Flags passed after
the `--` to `node-pre-gyp configure` will be passed directly to gyp while flags passed
after the `--` to `node-pre-gyp build` will be passed directly to make/visual studio.
- Added `node-pre-gyp configure` command to be able to call `node-gyp configure` directly
- Fix issue with require validation not working on windows 7 (@edgarsilva)
## 0.6.2
- Support for io.js >= v1.0.2
- Deferred require of `request` and `tar` to help speed up command line usage of `node-pre-gyp`.
## 0.6.1
- Fixed bundled `tar` version
## 0.6.0
- BREAKING: node odd releases like v0.11.x now use `major.minor.patch` for `{node_abi}` instead of `NODE_MODULE_VERSION` (#124)
- Added support for `toolset` option in versioning. By default is an empty string but `--toolset` can be passed to publish or install to select alternative binaries that target a custom toolset like C++11. For example to target Visual Studio 2014 modules like node-sqlite3 use `--toolset=v140`.
- Added support for `--no-rollback` option to request that a failed binary test does not remove the binary module but instead leaves it in place.
- Added support for `--update-binary` option to request an existing binary be re-installed and the check for a valid local module be skipped.
- Added support for passing build options from `npm` through `node-pre-gyp` to `node-gyp`: `--nodedir`, `--disturl`, `--python`, and `--msvs_version`
## 0.5.31
- Added support for deducing node_abi for node.js runtime from previous release if the series is even
- Added support for --target=0.10.33
## 0.5.30
- Repackaged with latest bundled deps
## 0.5.29
- Added support for semver `build`.
- Fixed support for downloading from urls that include `+`.
## 0.5.28
- Now reporting unix style paths only in reveal command
## 0.5.27
- Fixed support for auto-detecting s3 bucket name when it contains `.` - @taavo
- Fixed support for installing when path contains a `'` - @halfdan
- Ported tests to mocha
## 0.5.26
- Fix node-webkit support when `--target` option is not provided
## 0.5.25
- Fix bundling of deps
## 0.5.24
- Updated ABI crosswalk to include node v0.10.30 and v0.10.31
## 0.5.23
- Added `reveal` command. Pass no options to get all versioning data as json. Pass a second arg to grab a single versioned property value
- Added support for `--silent` (shortcut for `--loglevel=silent`)
## 0.5.22
- Fixed node-webkit versioning name (NOTE: node-webkit support still experimental)
## 0.5.21
- New package to fix `shasum check failed` error with v0.5.20
## 0.5.20
- Now versioning node-webkit binaries based on major.minor.patch - assuming no compatible ABI across versions (#90)
## 0.5.19
- Updated to know about more node-webkit releases
## 0.5.18
- Updated to know about more node-webkit releases
## 0.5.17
- Updated to know about node v0.10.29 release
## 0.5.16
- Now supporting all aws-sdk configuration parameters (http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-configuring.html) (#86)
## 0.5.15
- Fixed installation of windows packages sub directories on unix systems (#84)
## 0.5.14
- Finished support for cross building using `--target_platform` option (#82)
- Now skipping binary validation on install if target arch/platform do not match the host.
- Removed multi-arch validating for OS X since it required a FAT node.js binary
## 0.5.13
- Fix problem in 0.5.12 whereby the wrong versions of mkdirp and semver were bundled.
## 0.5.12
- Improved support for node-webkit (@Mithgol)
## 0.5.11
- Updated target versions listing
## 0.5.10
- Fixed handling of `-debug` flag passed directly to node-pre-gyp (#72)
- Added optional second arg to `node_pre_gyp.find` to customize the default versioning options used to locate the runtime binary
- Failed install due to `testbinary` check failure no longer leaves behind binary (#70)
## 0.5.9
- Fixed regression in `testbinary` command causing installs to fail on windows with 0.5.7 (#60)
## 0.5.8
- Started bundling deps
## 0.5.7
- Fixed the `testbinary` check, which is used to determine whether to re-download or source compile, to work even in complex dependency situations (#63)
- Exposed the internal `testbinary` command in node-pre-gyp command line tool
- Fixed minor bug so that `fallback_to_build` option is always respected
## 0.5.6
- Added support for versioning on the `name` value in `package.json` (#57).
- Moved to using streams for reading tarball when publishing (#52)
## 0.5.5
- Improved binary validation that also now works with node-webkit (@Mithgol)
- Upgraded test apps to work with node v0.11.x
- Improved test coverage
## 0.5.4
- No longer depends on external install of node-gyp for compiling builds.
## 0.5.3
- Reverted fix for debian/nodejs since it broke windows (#45)
## 0.5.2
- Support for debian systems where the node binary is named `nodejs` (#45)
- Added `bin/node-pre-gyp.cmd` to be able to run command on windows locally (npm creates a `.cmd` shim automatically when globally installed)
- Updated abi-crosswalk with node v0.10.26 entry.
## 0.5.1
- Various minor bug fixes, several improving windows support for publishing.
## 0.5.0
- Changed property names in `binary` object: now required are `module_name`, `module_path`, and `host`.
- Now `module_path` supports versioning, which allows developers to opt-in to using a versioned install path (#18).
- Added `remote_path` which also supports versioning.
- Changed `remote_uri` to `host`.
## 0.4.2
- Added support for `--target` flag to request cross-compile against a specific node/node-webkit version.
- Added preliminary support for node-webkit
- Fixed support for `--target_arch` option being respected in all cases.
## 0.4.1
- Fixed exception when only stderr is available in binary test (@bendi / #31)
## 0.4.0
- Enforce only `https:` based remote publishing access.
- Added `node-pre-gyp info` command to display listing of published binaries
- Added support for changing the directory node-pre-gyp should build in with the `-C/--directory` option.
- Added support for S3 prefixes.
## 0.3.1
- Added `unpublish` command.
- Fixed module path construction in tests.
- Added ability to disable falling back to build behavior via `npm install --fallback-to-build=false` which overrides the setting in a dependency's package.json `install` target.
## 0.3.0
- Support for packaging all files in `module_path` directory - see `app4` for example
- Added `testpackage` command.
- Changed `clean` command to only delete `.node` not entire `build` directory since node-gyp will handle that.
- `.node` modules must be in a folder of their own since tar-pack will remove everything when it unpacks.

View File

@@ -1,27 +0,0 @@
Copyright (c), Mapbox
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of node-pre-gyp nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -1,581 +0,0 @@
# node-pre-gyp
#### node-pre-gyp makes it easy to publish and install Node.js C++ addons from binaries
[![NPM](https://nodei.co/npm/node-pre-gyp.png?downloads=true&downloadRank=true)](https://nodei.co/npm/node-pre-gyp/)
[![Build Status](https://api.travis-ci.org/mapbox/node-pre-gyp.svg)](https://travis-ci.org/mapbox/node-pre-gyp)
[![Build status](https://ci.appveyor.com/api/projects/status/3nxewb425y83c0gv)](https://ci.appveyor.com/project/Mapbox/node-pre-gyp)
[![Dependencies](https://david-dm.org/mapbox/node-pre-gyp.svg)](https://david-dm.org/mapbox/node-pre-gyp)
`node-pre-gyp` stands between [npm](https://github.com/npm/npm) and [node-gyp](https://github.com/Tootallnate/node-gyp) and offers a cross-platform method of binary deployment.
### Features
- A command line tool called `node-pre-gyp` that can install your package's C++ module from a binary.
- A variety of developer targeted commands for packaging, testing, and publishing binaries.
- A JavaScript module that can dynamically require your installed binary: `require('node-pre-gyp').find`
For a hello world example of a module packaged with `node-pre-gyp` see <https://github.com/springmeyer/node-addon-example> and [the wiki](https://github.com/mapbox/node-pre-gyp/wiki/Modules-using-node-pre-gyp) for real world examples.
## Credits
- The module is modeled after [node-gyp](https://github.com/Tootallnate/node-gyp) by [@Tootallnate](https://github.com/Tootallnate)
- Motivation for initial development came from [@ErisDS](https://github.com/ErisDS) and the [Ghost Project](https://github.com/TryGhost/Ghost).
- Development is sponsored by [Mapbox](https://www.mapbox.com/)
## FAQ
See the [Frequently Asked Questions](https://github.com/mapbox/node-pre-gyp/wiki/FAQ).
## Depends
- Node.js 0.8.x through 0.12.x
## Install
`node-pre-gyp` is designed to be installed as a local dependency of your Node.js C++ addon and accessed like:
./node_modules/.bin/node-pre-gyp --help
But you can also install it globally:
npm install node-pre-gyp -g
## Usage
### Commands
View all possible commands:
node-pre-gyp --help
- clean - Remove the entire folder containing the compiled .node module
- install - Install pre-built binary for module
- reinstall - Run "clean" and "install" at once
- build - Compile the module by dispatching to node-gyp or nw-gyp
- rebuild - Run "clean" and "build" at once
- package - Pack binary into tarball
- testpackage - Test that the staged package is valid
- publish - Publish pre-built binary
- unpublish - Unpublish pre-built binary
- info - Fetch info on published binaries
You can also chain commands:
node-pre-gyp clean build unpublish publish info
### Options
Options include:
- `-C/--directory`: run the command in this directory
- `--build-from-source`: build from source instead of using pre-built binary
- `--runtime=node-webkit`: customize the runtime: `node` and `node-webkit` are the valid options
- `--fallback-to-build`: fallback to building from source if pre-built binary is not available
- `--target=0.10.25`: Pass the target node or node-webkit version to compile against
- `--target_arch=ia32`: Pass the target arch and override the host `arch`. Valid values are `ia32`, `x64`, and `arm`.
- `--target_platform=win32`: Pass the target platform and override the host `platform`. Valid values are `linux`, `darwin`, `win32`, `sunos`, `freebsd`, `openbsd`, and `aix`.
Both `--build-from-source` and `--fallback-to-build` can be passed alone or they can provide values. You can pass `--fallback-to-build=false` to override the option as declared in package.json. In addition to being able to pass `--build-from-source` you can also pass `--build-from-source=myapp` where `myapp` is the name of your module.
For example: `npm install --build-from-source=myapp`. This is useful if:
- `myapp` is referenced in the package.json of a larger app and therefore `myapp` is being installed as a dependent with `npm install`.
- The larger app also depends on other modules installed with `node-pre-gyp`
- You only want to trigger a source compile for `myapp` and the other modules.
### Configuring
This is a guide to configuring your module to use node-pre-gyp.
#### 1) Add new entries to your `package.json`
- Add `node-pre-gyp` to `bundledDependencies`
- Add `aws-sdk` as a `devDependency`
- Add a custom `install` script
- Declare a `binary` object
This looks like:
```js
"dependencies" : {
"node-pre-gyp": "0.5.x"
},
"bundledDependencies":["node-pre-gyp"],
"devDependencies": {
"aws-sdk": "~2.0.0-rc.15"
},
"scripts": {
    "install": "node-pre-gyp install --fallback-to-build"
},
"binary": {
"module_name": "your_module",
"module_path": "./lib/binding/",
"host": "https://your_module.s3-us-west-1.amazonaws.com"
}
```
For a full example see [node-addon-examples's package.json](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/package.json#L11-L22).
##### The `binary` object has three required properties
###### module_name
The name of your native node module. This value must:
- Match the name passed to [the NODE_MODULE macro](http://nodejs.org/api/addons.html#addons_hello_world)
- Be a valid C variable name (e.g. it cannot contain `-`)
- Not include the `.node` extension.
###### module_path
The location your native module is placed after a build. This should be an empty directory without other JavaScript files. This entire directory will be packaged in the binary tarball. When installing from a remote package this directory will be overwritten with the contents of the tarball.
Note: This property supports variables based on [Versioning](#versioning).
###### host
A URL to the remote location where you've published tarball binaries (must be `https` not `http`).
It is highly recommended that you use Amazon S3. The reasons are:
- Various node-pre-gyp commands like `publish` and `info` only work with an S3 host.
- S3 is a very solid hosting platform for distributing large files, even [GitHub recommends using it instead of GitHub](https://help.github.com/articles/distributing-large-binaries).
- We provide detail documentation for using [S3 hosting](#s3-hosting) with node-pre-gyp.
Why then not require S3? Because while some applications using node-pre-gyp need to distribute binaries as large as 20-30 MB, others might have very small binaries and might wish to store them in a GitHub repo. This is not recommended, but if an author really wants to host in a non-S3 location then it should be possible.
##### The `binary` object has two optional properties
###### remote_path
It **is recommended** that you customize this property. This is an extra path to use for publishing and finding remote tarballs. The default value for `remote_path` is `""` meaning that if you do not provide it then all packages will be published at the base of the `host`. It is recommended to provide a value like `./{name}/v{version}` to help organize remote packages in the case that you choose to publish multiple node addons to the same `host`.
Note: This property supports variables based on [Versioning](#versioning).
###### package_name
It is **not recommended** to override this property unless you are also overriding the `remote_path`. This is the versioned name of the remote tarball containing the binary `.node` module and any supporting files you've placed inside the `module_path` directory. Unless you specify `package_name` in your `package.json` then it defaults to `{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz` which allows your binary to work across node versions, platforms, and architectures. If you are using `remote_path` that is also versioned by `./{module_name}/v{version}` then you could remove these variables from the `package_name` and just use: `{node_abi}-{platform}-{arch}.tar.gz`. Then your remote tarball will be looked up at, for example, `https://example.com/your-module/v0.1.0/node-v11-linux-x64.tar.gz`.
Avoiding the version of your module in the `package_name` and instead only embedding in a directory name can be useful when you want to make a quick tag of your module that does not change any C++ code. In this case you can just copy binaries to the new version behind the scenes like:
```sh
aws s3 sync --acl public-read s3://mapbox-node-binary/sqlite3/v3.0.3/ s3://mapbox-node-binary/sqlite3/v3.0.4/
```
Note: This property supports variables based on [Versioning](#versioning).
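As a rough illustration, here is how the default `package_name` template could resolve (a minimal sketch; the module name, version, and ABI value below are hypothetical, and the real substitution happens inside node-pre-gyp's versioning logic):
```js
// Hypothetical inputs, for illustration only.
var vars = {
    module_name: 'your_module', // binary.module_name from package.json
    version: '0.1.0',           // package.json version
    node_abi: 'node-v11',       // derived from the node runtime
    platform: 'linux',          // process.platform
    arch: 'x64'                 // process.arch
};

// Substitute each {variable} in the default template.
var template = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
var package_name = template.replace(/\{(\w+)\}/g, function (m, key) {
    return vars[key];
});

console.log(package_name);
// => your_module-v0.1.0-node-v11-linux-x64.tar.gz
```
This matches the example lookup URL above: the `host`, then the `remote_path`, then the resolved `package_name`.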
#### 2) Add a new target to binding.gyp
`node-pre-gyp` calls out to `node-gyp` to compile the module and passes variables along like [module_name](#module_name) and [module_path](#module_path).
A new target must be added to `binding.gyp` that moves the compiled `.node` module from `./build/Release/module_name.node` into the directory specified by `module_path`.
Add a target like this at the end of your `targets` list:
```js
{
"target_name": "action_after_build",
"type": "none",
"dependencies": [ "<(module_name)" ],
"copies": [
{
"files": [ "<(PRODUCT_DIR)/<(module_name).node" ],
"destination": "<(module_path)"
}
]
}
```
For a full example see [node-addon-example's binding.gyp](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/binding.gyp).
#### 3) Dynamically require your `.node`
Inside the main js file that requires your addon module you are likely currently doing:
```js
var binding = require('../build/Release/binding.node');
```
or:
```js
var bindings = require('./bindings')
```
Change those lines to:
```js
var binary = require('node-pre-gyp');
var path = require('path');
var binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json')));
var binding = require(binding_path);
```
For a full example see [node-addon-example's index.js](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/index.js#L1-L4)
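As noted in the 0.5.10 changelog entry above, `find` also accepts an optional second argument to customize the versioning options used to locate the binary. A hedged sketch, assuming the `debug` option selects the 'Debug' configuration as described in the [Versioning](#versioning) section below:
```js
var binary = require('node-pre-gyp');
var path = require('path');

// Assumption: { debug: true } resolves the binding built under the Debug
// configuration instead of the default Release one.
var binding_path = binary.find(
    path.resolve(path.join(__dirname, './package.json')),
    { debug: true }
);
var binding = require(binding_path);
```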
#### 4) Build and package your app
Now build your module from source:
npm install --build-from-source
The `--build-from-source` flag tells `node-pre-gyp` not to look for a remote package and instead dispatch to node-gyp to build.
`node-pre-gyp` should now also be installed as a local dependency, so the command line tool it offers can be found at `./node_modules/.bin/node-pre-gyp`.
#### 5) Test
Now `npm test` should work just as it did before.
#### 6) Publish the tarball
Then package your app:
./node_modules/.bin/node-pre-gyp package
Once packaged, now you can publish:
./node_modules/.bin/node-pre-gyp publish
Currently the `publish` command pushes your binary to S3. This requires:
- You have installed `aws-sdk` with `npm install aws-sdk`
- You have created a bucket already.
- The `host` points to an S3 http or https endpoint.
- You have configured node-pre-gyp to read your S3 credentials (see [S3 hosting](#s3-hosting) for details).
You can also host your binaries elsewhere. To do this requires:
- You manually publish the binary created by the `package` command to an `https` endpoint
- Ensure that the `host` value points to your custom `https` endpoint.
#### 7) Automate builds
Now you need to publish builds for all the platforms and node versions you wish to support. This is best automated.
- See [Appveyor Automation](#appveyor-automation) for how to auto-publish builds on Windows.
- See [Travis Automation](#travis-automation) for how to auto-publish builds on OS X and Linux.
#### 8) You're done!
Now publish your module to the npm registry. Users will now be able to install your module from a binary.
What will happen is this:
1. `npm install <your package>` will pull from the npm registry
2. npm will run the `install` script which will call out to `node-pre-gyp`
3. `node-pre-gyp` will fetch the binary `.node` module and unpack in the right place
4. Assuming that all worked, you are done
If a binary was not available for a given platform and `--fallback-to-build` was used then `node-gyp rebuild` will be called to try to source compile the module.
## S3 Hosting
You can host wherever you choose but S3 is cheap, `node-pre-gyp publish` expects it, and S3 can be integrated well with [travis.ci](http://travis-ci.org) to automate builds for OS X and Ubuntu. Here is an approach to do this:
First, get setup locally and test the workflow:
#### 1) Create an S3 bucket
And have your **key** and **secret key** ready for writing to the bucket.
It is recommended to create a IAM user with a policy that only gives permissions to the specific bucket you plan to publish to. This can be done in the [IAM console](https://console.aws.amazon.com/iam/) by: 1) adding a new user, 2) choosing `Attach User Policy`, 3) Using the `Policy Generator`, 4) selecting `Amazon S3` for the service, 5) adding the actions: `DeleteObject`, `GetObject`, `GetObjectAcl`, `ListBucket`, `PutObject`, `PutObjectAcl`, 6) adding an ARN of `arn:aws:s3:::bucket/*` (replacing `bucket` with your bucket name), and finally 7) clicking `Add Statement` and saving the policy. It should generate a policy like:
```js
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "Stmt1394587197000",
"Effect": "Allow",
"Action": [
"s3:DeleteObject",
"s3:GetObject",
"s3:GetObjectAcl",
"s3:ListBucket",
"s3:PutObject",
"s3:PutObjectAcl"
],
"Resource": [
"arn:aws:s3:::node-pre-gyp-tests/*"
]
}
]
}
```
#### 2) Install node-pre-gyp
Either install it globally:
npm install node-pre-gyp -g
Or put the local version on your PATH
export PATH=`pwd`/node_modules/.bin/:$PATH
#### 3) Configure AWS credentials
There are several ways to do this.
You can use any of the methods described at http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-configuring.html.
Or you can create a `~/.node_pre_gyprc`
Or pass options in any way supported by [RC](https://github.com/dominictarr/rc#standards)
A `~/.node_pre_gyprc` looks like:
```js
{
"accessKeyId": "xxx",
"secretAccessKey": "xxx"
}
```
Another way is to use your environment:
export node_pre_gyp_accessKeyId=xxx
export node_pre_gyp_secretAccessKey=xxx
You may also need to specify the `region` if it is not explicit in the `host` value you use. The `bucket` can also be specified but it is optional because `node-pre-gyp` will detect it from the `host` value.
#### 4) Package and publish your build
Install the `aws-sdk`:
npm install aws-sdk
Then publish:
node-pre-gyp package publish
Note: if you hit an error like `Hostname/IP doesn't match certificate's altnames` it may mean that you need to provide the `region` option in your config.
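For example, a minimal `~/.node_pre_gyprc` that pins the region explicitly might look like this (all values are placeholders):
```js
{
    "accessKeyId": "xxx",
    "secretAccessKey": "xxx",
    "region": "us-west-1"
}
```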
## Appveyor Automation
[Appveyor](http://www.appveyor.com/) can build binaries and publish the results per commit and supports:
- Windows Visual Studio 2013 and related compilers
- Both 64 bit (x64) and 32 bit (x86) build configurations
- Multiple Node.js versions
For an example of doing this see [node-sqlite3's appveyor.yml](https://github.com/mapbox/node-sqlite3/blob/master/appveyor.yml).
Below is a guide to getting set up:
#### 1) Create a free Appveyor account
Go to https://ci.appveyor.com/signup/free and sign in with your github account.
#### 2) Create a new project
Go to https://ci.appveyor.com/projects/new and select the github repo for your module
#### 3) Add appveyor.yml and push it
Once you have committed an `appveyor.yml` ([appveyor.yml reference](http://www.appveyor.com/docs/appveyor-yml)) to your github repo and pushed it appveyor should automatically start building your project.
#### 4) Create secure variables
Encrypt your S3 AWS keys by going to <https://ci.appveyor.com/tools/encrypt> and hitting the `encrypt` button.
Then paste the result into your `appveyor.yml`
```yml
environment:
node_pre_gyp_accessKeyId:
secure: Dn9HKdLNYvDgPdQOzRq/DqZ/MPhjknRHB1o+/lVU8MA=
node_pre_gyp_secretAccessKey:
secure: W1rwNoSnOku1r+28gnoufO8UA8iWADmL1LiiwH9IOkIVhDTNGdGPJqAlLjNqwLnL
```
NOTE: keys are per account but not per repo (this differs from Travis, where keys are per repo but not related to the account used to encrypt them).
#### 5) Hook up publishing
Just put `node-pre-gyp package publish` in your `appveyor.yml` after `npm install`.
#### 6) Publish when you want
You might wish to publish binaries only on a specific commit. To do this you could borrow from the [travis.ci idea of commit keywords](http://about.travis-ci.org/docs/user/how-to-skip-a-build/) and add special handling for commit messages with `[publish binary]`:
SET CM=%APPVEYOR_REPO_COMMIT_MESSAGE%
if not "%CM%" == "%CM:[publish binary]=%" node-pre-gyp --msvs_version=2013 publish
If your commit message contains special characters (e.g. `&`) this method might fail. An alternative is to use PowerShell, which gives you additional possibilities, like ignoring case by using `ToLower()`:
ps: if($env:APPVEYOR_REPO_COMMIT_MESSAGE.ToLower().Contains('[publish binary]')) { node-pre-gyp --msvs_version=2013 publish }
Remember this publishing is not the same as `npm publish`. We're just talking about the binary module here and not your entire npm package. To automate the publishing of your entire package to npm on travis see http://about.travis-ci.org/docs/user/deployment/npm/
## Travis Automation
[Travis](https://travis-ci.org/) can push to S3 after a successful build and supports both:
- Ubuntu Precise and OS X (64 bit)
- Multiple Node.js versions
For an example of doing this see [node-add-example's .travis.yml](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/.travis.yml).
Note: if you need 32 bit binaries, this can be done from a 64 bit travis machine. See [the node-sqlite3 scripts for an example of doing this](https://github.com/mapbox/node-sqlite3/blob/bae122aa6a2b8a45f6b717fab24e207740e32b5d/scripts/build_against_node.sh#L54-L74).
Below is a guide to getting set up:
#### 1) Install the travis gem
gem install travis
#### 2) Create secure variables
Make sure you run this command from within the directory of your module.
Use `travis-encrypt` like:
travis encrypt node_pre_gyp_accessKeyId=${node_pre_gyp_accessKeyId}
travis encrypt node_pre_gyp_secretAccessKey=${node_pre_gyp_secretAccessKey}
Then put those values in your `.travis.yml` like:
```yaml
env:
global:
- secure: F+sEL/v56CzHqmCSSES4pEyC9NeQlkoR0Gs/ZuZxX1ytrj8SKtp3MKqBj7zhIclSdXBz4Ev966Da5ctmcTd410p0b240MV6BVOkLUtkjZJyErMBOkeb8n8yVfSoeMx8RiIhBmIvEn+rlQq+bSFis61/JkE9rxsjkGRZi14hHr4M=
- secure: o2nkUQIiABD139XS6L8pxq3XO5gch27hvm/gOdV+dzNKc/s2KomVPWcOyXNxtJGhtecAkABzaW8KHDDi5QL1kNEFx6BxFVMLO8rjFPsMVaBG9Ks6JiDQkkmrGNcnVdxI/6EKTLHTH5WLsz8+J7caDBzvKbEfTux5EamEhxIWgrI=
```
More details on travis encryption at http://about.travis-ci.org/docs/user/encryption-keys/.
#### 3) Hook up publishing
Just put `node-pre-gyp package publish` in your `.travis.yml` after `npm install`.
##### OS X publishing
If you want binaries for OS X in addition to linux you have two options:
1) [Enabling multi-OS](#enabling-multi-os)
2) [Using `language: objective-c` in a git branch](#using-language-objective-c).
##### Enabling multi-OS
This requires emailing a request to `support@travis-ci.com` for each repo you wish to have enabled. More details at <http://docs.travis-ci.com/user/multi-os/>.
Next you need to tweak the `.travis.yml` to ensure it is cross platform.
Use a configuration like:
```yml
language: cpp
os:
- linux
- osx
env:
matrix:
- NODE_VERSION="0.10"
- NODE_VERSION="0.11.14"
before_install:
- rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm
- source ~/.nvm/nvm.sh
- nvm install $NODE_VERSION
- nvm use $NODE_VERSION
```
See [Travis OS X Gotchas](#travis-os-x-gotchas) for why we replace `language: node_js` and `node_js:` sections with `language: cpp` and a custom matrix.
Also create platform-specific sections for any deps that need to be installed. For example, if you need libpng:
```yml
- if [ $(uname -s) == 'Linux' ]; then apt-get install libpng-dev; fi;
- if [ $(uname -s) == 'Darwin' ]; then brew install libpng; fi;
```
For detailed multi-OS examples see [node-mapnik](https://github.com/mapnik/node-mapnik/blob/master/.travis.yml) and [node-sqlite3](https://github.com/mapbox/node-sqlite3/blob/master/.travis.yml).
##### Using `language: objective-c`
If your repo does not have multi-OS enabled, an alternative method for building for OS X is to tweak your `.travis.yml` to use:
```yml
language: objective-c
```
Keep that change in a different git branch and sync that when you want binaries published.
Next learn about a few [Travis OS X Gotchas](#travis-os-x-gotchas).
##### Travis OS X Gotchas
First, unlike the Travis Linux machines, the OS X machines do not put `node-pre-gyp` on PATH by default. So you will need to:
```sh
export PATH=$(pwd)/node_modules/.bin:${PATH}
```
Second, the OS X machines do not support using a matrix for installing different node.js versions. So you need to bootstrap the installation of node.js in a cross-platform way.
By doing:
```yml
env:
matrix:
- NODE_VERSION="0.10"
- NODE_VERSION="0.11.14"
before_install:
- rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm
- source ~/.nvm/nvm.sh
- nvm install $NODE_VERSION
- nvm use $NODE_VERSION
```
You can easily recreate the previous behavior of this matrix:
```yml
node_js:
- "0.10"
- "0.11.14"
```
#### 4) Publish when you want
You might wish to publish binaries only on a specific commit. To do this you could borrow from the [travis.ci idea of commit keywords](http://about.travis-ci.org/docs/user/how-to-skip-a-build/) and add special handling for commit messages with `[publish binary]`:
COMMIT_MESSAGE=$(git show -s --format=%B $TRAVIS_COMMIT | tr -d '\n')
if [[ ${COMMIT_MESSAGE} =~ "[publish binary]" ]]; then node-pre-gyp publish; fi;
Then you can trigger new binaries to be built like:
git commit -a -m "[publish binary]"
Or, if you don't have any changes to make simply run:
git commit --allow-empty -m "[publish binary]"
Remember this publishing is not the same as `npm publish`. We're just talking about the binary module here and not your entire npm package. To automate the publishing of your entire package to npm on travis see http://about.travis-ci.org/docs/user/deployment/npm/
# Versioning
The `binary` properties of `module_path`, `remote_path`, and `package_name` support variable substitution. The strings are evaluated by `node-pre-gyp` depending on your system and any custom build flags you passed.
- `node_abi`: The node C++ `ABI` number. This value is available in Javascript as `process.versions.modules` as of [`>= v0.10.4 >= v0.11.7`](https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e) and in C++ as the `NODE_MODULE_VERSION` define much earlier. For versions of Node before this was available we fallback to the V8 major and minor version.
- `platform` matches node's `process.platform` like `linux`, `darwin`, and `win32` unless the user passed the `--target_platform` option to override.
- `arch` matches node's `process.arch` like `x64` or `ia32` unless the user passes the `--target_arch` option to override.
- `configuration` - Either 'Release' or 'Debug' depending on whether `--debug` is passed during the build.
- `module_name` - the `binary.module_name` attribute from `package.json`.
- `version` - the semver `version` value for your module from `package.json` (NOTE: ignores the `semver.build` property).
- `major`, `minor`, `patch`, and `prerelease` match the individual semver values for your module's `version`
- `build` - the semver `build` value. For example it would be `this.that` if your package.json `version` was `v1.0.0+this.that`
- `prerelease` - the semver `prerelease` value. For example it would be `alpha.beta` if your package.json `version` was `v1.0.0-alpha.beta`
The options are visible in the code at <https://github.com/mapbox/node-pre-gyp/blob/612b7bca2604508d881e1187614870ba19a7f0c5/lib/util/versioning.js#L114-L127>
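As a rough sketch of how some of these values can be derived at install time (an approximation only; the actual logic, including the `node_abi` fallbacks, lives in the `versioning.js` linked above):
```js
// Approximate derivation of the simpler variables; node_abi handling is
// omitted because it depends on the runtime and target version.
var semver = require('semver'); // bundled dependency of node-pre-gyp
var package_json = require('./package.json'); // the consuming module

var parsed = semver.parse(package_json.version);
var vars = {
    platform: process.platform,   // e.g. 'linux', unless --target_platform
    arch: process.arch,           // e.g. 'x64', unless --target_arch
    configuration: 'Release',     // 'Debug' when --debug is passed
    module_name: package_json.binary.module_name,
    version: parsed.version,
    major: parsed.major,
    minor: parsed.minor,
    patch: parsed.patch,
    prerelease: parsed.prerelease.join('.'), // e.g. 'alpha.beta'
    build: parsed.build.join('.')            // e.g. 'this.that'
};
```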

View File

@@ -1,30 +0,0 @@
environment:
matrix:
- nodejs_version: 0.10.36
- nodejs_version: 0.12.0
- nodejs_version: 1.2.0
platform:
- x64
- x86
shallow_clone: true
install:
- ps: Install-Product node $env:nodejs_version $env:Platform
- node --version
- npm --version
- SET PATH=%APPDATA%\npm;%PATH%
- npm update -g npm
- npm --version
- node -e "console.log(process.arch);"
- SET PATH=C:\Program Files (x86)\MSBuild\12.0\bin\;%PATH%
- if "%PLATFORM%" == "x64" set PATH=C:\Python27-x64;%PATH%
- if "%PLATFORM%" == "x86" SET PATH=C:\python27;%PATH%
- npm install
- npm test
- .\scripts\test.bat
build: off
test: off
deploy: off

View File

@@ -1,131 +0,0 @@
#!/usr/bin/env node
'use strict';
/**
* Set the title.
*/
process.title = 'node-pre-gyp';
/**
* Module dependencies.
*/
var node_pre_gyp = require('../');
var log = require('npmlog');
/**
* Process and execute the selected commands.
*/
var prog = new node_pre_gyp.Run();
var completed = false;
prog.parseArgv(process.argv);
if (prog.todo.length === 0) {
if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) {
console.log('v%s', prog.version);
} else {
console.log('%s', prog.usage());
}
return process.exit(0);
}
// if --no-color is passed
if (prog.opts && prog.opts.hasOwnProperty('color') && !prog.opts.color) {
log.disableColor();
}
log.info('it worked if it ends with', 'ok');
log.verbose('cli', process.argv);
log.info('using', process.title + '@%s', prog.version);
log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch);
/**
* Change dir if -C/--directory was passed.
*/
var dir = prog.opts.directory;
if (dir) {
var fs = require('fs');
try {
var stat = fs.statSync(dir);
if (stat.isDirectory()) {
log.info('chdir', dir);
process.chdir(dir);
} else {
log.warn('chdir', dir + ' is not a directory');
}
} catch (e) {
if (e.code === 'ENOENT') {
log.warn('chdir', dir + ' is not a directory');
} else {
log.warn('chdir', 'error during chdir() "%s"', e.message);
}
}
}
function run () {
var command = prog.todo.shift();
if (!command) {
// done!
completed = true;
log.info('ok');
return;
}
prog.commands[command.name](command.args, function (err) {
if (err) {
log.error(command.name + ' error');
log.error('stack', err.stack);
errorMessage();
log.error('not ok');
console.log(err.message);
return process.exit(1);
}
var args_array = [].slice.call(arguments, 1);
if (args_array.length) {
console.log.apply(console, args_array);
}
// now run the next command in the queue
process.nextTick(run);
});
}
process.on('exit', function (code) {
if (!completed && !code) {
log.error('Completion callback never invoked!');
issueMessage();
process.exit(6);
}
});
process.on('uncaughtException', function (err) {
log.error('UNCAUGHT EXCEPTION');
log.error('stack', err.stack);
issueMessage();
process.exit(7);
});
function errorMessage () {
// copied from npm's lib/util/error-handler.js
var os = require('os');
log.error('System', os.type() + ' ' + os.release());
log.error('command', process.argv.map(JSON.stringify).join(' '));
log.error('cwd', process.cwd());
log.error('node -v', process.version);
log.error(process.title+' -v', 'v' + prog.package.version);
}
function issueMessage () {
errorMessage();
log.error('', [ 'This is a bug in `'+process.title+'`.',
'Try to update '+process.title+' and file an issue if it does not help:',
' <https://github.com/mapbox/'+process.title+'/issues>',
].join('\n'));
}
// start running the given commands!
run();

View File

@@ -1,43 +0,0 @@
"use strict";
module.exports = exports = build;
exports.usage = 'Attempts to compile the module by dispatching to node-gyp or nw-gyp';
var compile = require('./util/compile.js');
var handle_gyp_opts = require('./util/handle_gyp_opts.js');
var configure = require('./configure.js');
function do_build(gyp,argv,callback) {
handle_gyp_opts(gyp,argv,function(err,result) {
var final_args = ['build'].concat(result.gyp).concat(result.pre);
if (result.unparsed.length > 0) {
final_args = final_args.
concat(['--']).
concat(result.unparsed);
}
compile.run_gyp(final_args,result.opts,function(err) {
return callback(err);
});
});
}
function build(gyp, argv, callback) {
// Form up commands to pass to node-gyp:
// We map `node-pre-gyp build` to `node-gyp configure build` so that we do not
// trigger a clean and therefore do not pay the penalty of a full recompile
if (argv.length && (argv.indexOf('rebuild') > -1)) {
// here we map `node-pre-gyp rebuild` to `node-gyp rebuild` which internally means
// "clean + configure + build" and triggers a full recompile
compile.run_gyp(['clean'],{},function(err) {
if (err) return callback(err);
configure(gyp,argv,function(err) {
if (err) return callback(err);
return do_build(gyp,argv,callback);
});
});
} else {
return do_build(gyp,argv,callback);
}
}

View File

@@ -1,23 +0,0 @@
"use strict";
module.exports = exports = clean;
exports.usage = 'Removes the entire folder containing the compiled .node module';
var fs = require('fs');
var rm = require('rimraf');
var exists = require('fs').exists || require('path').exists;
var versioning = require('./util/versioning.js');
function clean (gyp, argv, callback) {
var package_json = JSON.parse(fs.readFileSync('./package.json'));
var opts = versioning.evaluate(package_json, gyp.opts);
var to_delete = opts.module_path;
exists(to_delete, function(found) {
if (found) {
if (!gyp.opts.silent_clean) console.log('['+package_json.name+'] Removing "%s"', to_delete);
return rm(to_delete, callback);
}
return callback();
});
}

View File

@@ -1,48 +0,0 @@
"use strict";
module.exports = exports = configure;
exports.usage = 'Attempts to configure node-gyp or nw-gyp build';
var compile = require('./util/compile.js');
var handle_gyp_opts = require('./util/handle_gyp_opts.js');
function configure(gyp, argv, callback) {
handle_gyp_opts(gyp,argv,function(err,result) {
var final_args = result.gyp.concat(result.pre);
// pull select node-gyp configure options out of the npm environ
var known_gyp_args = ['dist-url','python','nodedir','msvs_version'];
known_gyp_args.forEach(function(key) {
var val = gyp.opts[key] || gyp.opts[key.replace('-','_')];
if (val) {
final_args.push('--'+key+'='+val);
}
});
// --ensure=false tells node-gyp to re-install node development headers
// but it is only respected by node-gyp install, so we have to call install
// as a separate step if the user passes it
if (gyp.opts.ensure === false) {
var install_args = final_args.concat(['install','--ensure=false']);
compile.run_gyp(install_args,result.opts,function(err) {
if (err) return callback(err);
if (result.unparsed.length > 0) {
final_args = final_args.
concat(['--']).
concat(result.unparsed);
}
compile.run_gyp(['configure'].concat(final_args),result.opts,function(err) {
return callback(err);
});
});
} else {
if (result.unparsed.length > 0) {
final_args = final_args.
concat(['--']).
concat(result.unparsed);
}
compile.run_gyp(['configure'].concat(final_args),result.opts,function(err) {
return callback(err);
});
}
});
}

View File

@@ -1,40 +0,0 @@
"use strict";
module.exports = exports = info;
exports.usage = 'Lists all published binaries (requires aws-sdk)';
var fs = require('fs');
var log = require('npmlog');
var versioning = require('./util/versioning.js');
var s3_setup = require('./util/s3_setup.js');
var config = require('rc')("node_pre_gyp",{acl:"public-read"});
function info(gyp, argv, callback) {
var AWS = require("aws-sdk");
var package_json = JSON.parse(fs.readFileSync('./package.json'));
var opts = versioning.evaluate(package_json, gyp.opts);
s3_setup.detect(opts.hosted_path,config);
AWS.config.update(config);
var s3 = new AWS.S3();
var s3_opts = { Bucket: config.bucket,
Prefix: config.prefix
};
s3.listObjects(s3_opts, function(err, meta){
if (err && err.code == 'NotFound') {
return callback(new Error('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/'+config.prefix));
} else if(err) {
return callback(err);
} else {
log.verbose(JSON.stringify(meta,null,1));
if (meta && meta.Contents) {
meta.Contents.forEach(function(obj) {
console.log(obj.Key);
});
} else {
console.error('['+package_json.name+'] No objects found at https://' + s3_opts.Bucket + '.s3.amazonaws.com/'+config.prefix );
}
return callback();
}
});
}

View File

@@ -1,205 +0,0 @@
"use strict";
module.exports = exports = install;
exports.usage = 'Attempts to install pre-built binary for module';
var fs = require('fs');
var path = require('path');
var zlib = require('zlib');
var log = require('npmlog');
var existsAsync = fs.exists || path.exists;
var versioning = require('./util/versioning.js');
var testbinary = require('./testbinary.js');
var clean = require('./clean.js');
function download(uri,opts,callback) {
log.http('GET', uri);
var req = null;
var requestOpts = {
uri: uri.replace('+','%2B'),
headers: {
'User-Agent': 'node-pre-gyp (node ' + process.version + ')'
}
};
var proxyUrl = opts.proxy ||
process.env.http_proxy ||
process.env.HTTP_PROXY ||
process.env.npm_config_proxy;
if (proxyUrl) {
if (/^https?:\/\//i.test(proxyUrl)) {
log.verbose('download', 'using proxy url: "%s"', proxyUrl);
requestOpts.proxy = proxyUrl;
} else {
log.warn('download', 'ignoring invalid "proxy" config setting: "%s"', proxyUrl);
}
}
try {
req = require('request')(requestOpts);
} catch (e) {
return callback(e);
}
if (req) {
req.on('response', function (res) {
log.http(res.statusCode, uri);
});
}
return callback(null,req);
}
function place_binary(from,to,opts,callback) {
download(from,opts,function(err,req) {
if (err) return callback(err);
if (!req) return callback(new Error("empty req"));
var badDownload = false;
var extractCount = 0;
var gunzip = zlib.createGunzip();
var extracter = require('tar').Extract({ path: to, strip: 1});
function afterTarball(err) {
if (err) return callback(err);
if (badDownload) return callback(new Error("bad download"));
if (extractCount === 0) {
return callback(new Error('There was a fatal problem while downloading/extracting the tarball'));
}
log.info('tarball', 'done parsing tarball');
callback();
}
function filter_func(entry) {
// ensure directories are +x
// https://github.com/mapnik/node-mapnik/issues/262
entry.props.mode |= (entry.props.mode >>> 2) & parseInt('0111',8);
log.info('install','unpacking ' + entry.path);
extractCount++;
}
gunzip.on('error', callback);
extracter.on('entry', filter_func);
extracter.on('error', callback);
extracter.on('end', afterTarball);
req.on('error', function(err) {
badDownload = true;
return callback(err);
});
req.on('close', function () {
if (extractCount === 0) {
return callback(new Error('Connection closed while downloading tarball file'));
}
});
req.on('response', function(res) {
if (res.statusCode !== 200) {
badDownload = true;
if (res.statusCode == 404) {
return callback(new Error('Pre-built binary not available for your system, looked for ' + from));
} else {
return callback(new Error(res.statusCode + ' status code downloading tarball ' + from));
}
}
// start unzipping and untaring
req.pipe(gunzip).pipe(extracter);
});
});
}
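// The download-and-extract above boils down to this streaming pattern
// (a sketch with hypothetical URL and paths):
//   require('request')('https://example.com/binding.tar.gz')
//     .pipe(require('zlib').createGunzip())
//     .pipe(require('tar').Extract({ path: './lib/binding', strip: 1 }));
// so the tarball is unpacked on the fly and never buffered on disk first.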
function do_build(gyp,argv,callback) {
gyp.todo.push( { name: 'build', args: ['rebuild'] } );
process.nextTick(callback);
}
function install(gyp, argv, callback) {
var package_json = JSON.parse(fs.readFileSync('./package.json'));
var source_build = gyp.opts['build-from-source'] || gyp.opts.build_from_source;
var update_binary = gyp.opts['update-binary'] || gyp.opts.update_binary;
var should_do_source_build = source_build === package_json.name || (source_build === true || source_build === 'true');
var no_rollback = gyp.opts.hasOwnProperty('rollback') && gyp.opts.rollback === false;
if (should_do_source_build) {
log.info('build','requesting source compile');
return do_build(gyp,argv,callback);
} else {
var fallback_to_build = gyp.opts['fallback-to-build'] || gyp.opts.fallback_to_build;
var should_do_fallback_build = fallback_to_build === package_json.name || (fallback_to_build === true || fallback_to_build === 'true');
// but allow override from npm
if (process.env.npm_config_argv) {
var cooked = JSON.parse(process.env.npm_config_argv).cooked;
var match = cooked.indexOf("--fallback-to-build");
if (match > -1 && cooked.length > match && cooked[match+1] == "false") {
should_do_fallback_build = false;
log.info('install','Build fallback disabled via npm flag: --fallback-to-build=false');
}
}
var opts;
try {
opts = versioning.evaluate(package_json, gyp.opts);
} catch (err) {
return callback(err);
}
var from = opts.hosted_tarball;
var to = opts.module_path;
var binary_module = path.join(to,opts.module_name + '.node');
existsAsync(binary_module, function(found) {
if (found && !update_binary) {
testbinary(gyp, argv, function(err) {
if (err) {
console.error('['+package_json.name+'] ' + err.message);
log.error("Testing local pre-built binary failed, attempting to re-download");
place_binary(from,to,opts,function(err) {
if (err) {
if (should_do_fallback_build) {
log.http(err.message + ' (falling back to source compile with node-gyp)');
return do_build(gyp,argv,callback);
} else {
return callback(err);
}
} else {
console.log('['+package_json.name+'] Success: "' + binary_module + '" is reinstalled via remote');
return callback();
}
});
} else {
console.log('['+package_json.name+'] Success: "' + binary_module + '" already installed');
console.log('Pass --update-binary to reinstall or --build-from-source to recompile');
return callback();
}
});
} else {
if (!update_binary) log.info('check','checked for "' + binary_module + '" (not found)');
place_binary(from,to,opts,function(err) {
if (err && should_do_fallback_build) {
log.http(err.message + ' (falling back to source compile with node-gyp)');
return do_build(gyp,argv,callback);
} else if (err) {
return callback(err);
} else {
testbinary(gyp, argv, function(err) {
if (err) {
if (no_rollback) {
return callback(err);
}
gyp.opts.silent_clean = true;
clean(gyp, argv, function(error) {
if (error) console.log(error);
if (should_do_fallback_build) {
console.error('['+package_json.name+'] ' + err.message);
log.error("Testing pre-built binary failed, attempting to source compile");
return do_build(gyp,argv,callback);
} else {
return callback(err);
}
});
} else {
console.log('['+package_json.name+'] Success: "' + binary_module + '" is installed via remote');
return callback();
}
});
}
});
}
});
}
}


@@ -1,192 +0,0 @@
"use strict";
/**
* Module exports.
*/
module.exports = exports;
/**
* Module dependencies.
*/
var path = require('path');
var nopt = require('nopt');
var log = require('npmlog');
var EE = require('events').EventEmitter;
var inherits = require('util').inherits;
var commands = [
'clean',
'install',
'reinstall',
'build',
'rebuild',
'package',
'testpackage',
'publish',
'publish-maybe',
'unpublish',
'info',
'testbinary',
'reveal',
'configure'
];
var aliases = {};
// differentiate node-pre-gyp's logs from npm's
log.heading = 'node-pre-gyp';
exports.find = require('./pre-binding').find;
function Run() {
var self = this;
this.commands = {};
commands.forEach(function (command) {
self.commands[command] = function (argv, callback) {
log.verbose('command', command, argv);
return require('./' + command)(self, argv, callback);
};
});
}
inherits(Run, EE);
exports.Run = Run;
var proto = Run.prototype;
/**
* Export the contents of the package.json.
*/
proto.package = require('../package');
/**
* nopt configuration definitions
*/
proto.configDefs = {
help: Boolean, // everywhere
arch: String, // 'configure'
debug: Boolean, // 'build'
directory: String, // bin
proxy: String, // 'install'
loglevel: String, // everywhere
};
/**
* nopt shorthands
*/
proto.shorthands = {
release: '--no-debug',
C: '--directory',
debug: '--debug',
j: '--jobs',
silent: '--loglevel=silent',
silly: '--loglevel=silly',
verbose: '--loglevel=verbose',
};
/**
* expose the command aliases for the bin file to use.
*/
proto.aliases = aliases;
/**
* Parses the given argv array and sets the 'opts',
* 'argv' and 'command' properties.
*/
proto.parseArgv = function parseOpts (argv) {
this.opts = nopt(this.configDefs, this.shorthands, argv);
this.argv = this.opts.argv.remain.slice();
var commands = this.todo = [];
// create a copy of the argv array with aliases mapped
argv = this.argv.map(function (arg) {
// is this an alias?
if (arg in this.aliases) {
arg = this.aliases[arg];
}
return arg;
}, this);
// process the mapped args into "command" objects ("name" and "args" props)
argv.slice().forEach(function (arg) {
if (arg in this.commands) {
var args = argv.splice(0, argv.indexOf(arg));
argv.shift();
if (commands.length > 0) {
commands[commands.length - 1].args = args;
}
commands.push({ name: arg, args: [] });
}
}, this);
if (commands.length > 0) {
commands[commands.length - 1].args = argv.splice(0);
}
// support for inheriting config env variables from npm
var npm_config_prefix = 'npm_config_';
Object.keys(process.env).forEach(function (name) {
if (name.indexOf(npm_config_prefix) !== 0) return;
var val = process.env[name];
if (name === npm_config_prefix + 'loglevel') {
log.level = val;
} else {
// add the user-defined options to the config
name = name.substring(npm_config_prefix.length);
// avoid npm argv clobber already present args
// which avoids problem of 'npm test' calling
// script that runs unique npm install commands
if (name === 'argv') {
if (this.opts.argv &&
this.opts.argv.remain &&
this.opts.argv.remain.length) {
// do nothing
} else {
this.opts[name] = val;
}
} else {
this.opts[name] = val;
}
}
}, this);
if (this.opts.loglevel) {
log.level = this.opts.loglevel;
}
log.resume();
};
/**
* Returns the usage instructions for node-pre-gyp.
*/
proto.usage = function usage () {
var str = [
'',
' Usage: node-pre-gyp <command> [options]',
'',
' where <command> is one of:',
commands.map(function (c) {
return ' - ' + c + ' - ' + require('./' + c).usage;
}).join('\n'),
'',
'node-pre-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'),
'node@' + process.versions.node
].join('\n');
return str;
};
/**
* Version number getter.
*/
Object.defineProperty(proto, 'version', {
get: function () {
return this.package.version;
},
enumerable: true
});


@@ -1,46 +0,0 @@
"use strict";
module.exports = exports = _package;
exports.usage = 'Packs binary (and enclosing directory) into locally staged tarball';
var fs = require('fs');
var path = require('path');
var log = require('npmlog');
var versioning = require('./util/versioning.js');
var write = require('fs').createWriteStream;
var existsAsync = fs.exists || path.exists;
var mkdirp = require('mkdirp');
function _package(gyp, argv, callback) {
var pack = require('tar-pack').pack;
var package_json = JSON.parse(fs.readFileSync('./package.json'));
var opts = versioning.evaluate(package_json, gyp.opts);
var from = opts.module_path;
var binary_module = path.join(from,opts.module_name + '.node');
existsAsync(binary_module,function(found) {
if (!found) {
return callback(new Error("Cannot package because " + binary_module + " missing: run `node-pre-gyp rebuild` first"));
}
var tarball = opts.staged_tarball;
var filter_func = function(entry) {
// ensure directories are +x
// https://github.com/mapnik/node-mapnik/issues/262
log.info('package','packing ' + entry.path);
return true;
};
mkdirp(path.dirname(tarball),function(err) {
if (err) throw err;
pack(from, { filter: filter_func })
.pipe(write(tarball))
.on('error', function(err) {
if (err) console.error('['+package_json.name+'] ' + err.message);
return callback(err);
})
.on('close', function() {
log.info('package','Binary staged at "' + tarball + '"');
return callback();
});
});
});
}


@@ -1,25 +0,0 @@
"use strict";
var versioning = require('../lib/util/versioning.js');
var existsSync = require('fs').existsSync || require('path').existsSync;
var path = require('path');
module.exports = exports;
exports.usage = 'Finds the require path for the node-pre-gyp installed module';
exports.validate = function(package_json) {
versioning.validate_config(package_json);
};
exports.find = function(package_json_path,opts) {
if (!existsSync(package_json_path)) {
throw new Error("package.json does not exist at " + package_json_path);
}
var package_json = require(package_json_path);
versioning.validate_config(package_json);
opts = opts || {};
if (!opts.module_root) opts.module_root = path.dirname(package_json_path);
var meta = versioning.evaluate(package_json,opts);
return meta.module;
};


@@ -1,146 +0,0 @@
'use strict';
var fs = require('fs');
var log = require('npmlog');
var versioning = require('./util/versioning.js');
var s3_setup = require('./util/s3_setup.js');
var url = require('url');
var config = require('rc')("node_pre_gyp", {
acl: "public-read"
});
var configure = require('./configure.js');
var compile = require('./util/compile.js');
var handle_gyp_opts = require('./util/handle_gyp_opts.js');
var createTarball = require('./package');
var AWS;
try {
AWS = require('aws-sdk');
} catch (e) {
log.error('publish-maybe', 'aws-sdk not installed');
}
function rebuild(gyp, argv, done) {
compile.run_gyp(['clean'], {}, function(err) {
if (err) return done(err);
configure(gyp, argv, function(err) {
if (err) return done(err);
handle_gyp_opts(gyp, argv, function(err, result) {
var final_args = ['build'].concat(result.gyp).concat(result.pre);
if (result.unparsed.length > 0) {
final_args = final_args.
concat(['--']).
concat(result.unparsed);
}
compile.run_gyp(final_args, result.opts, function(err) {
return done(err);
});
});
});
});
}
function PublishMaybe(gyp, argv, done) {
if (!(this instanceof PublishMaybe)) return new PublishMaybe(gyp, argv, done);
if (AWS) {
var pkg = JSON.parse(fs.readFileSync('./package.json'));
this.state = versioning.evaluate(pkg, gyp.opts);
s3_setup.detect(this.state.hosted_path, config);
AWS.config.update(config);
this.state.s3_bucket = config.bucket || pkg.binary.bucket;
this.state.s3_key = url.resolve(config.prefix, this.state.package_name);
log.info('publish-maybe', 'loaded state: ' + JSON.stringify(this.state));
if (!this.state.s3_bucket) {
throw new Error('Could not detect s3 bucket automatically and not set in config.');
}
this.s3 = new AWS.S3();
}
var self = this;
this.prepublish(function(err, ok) {
if (err) return done(err);
if (!ok) {
return done();
}
self.publish(gyp, argv, done);
});
}
PublishMaybe.prototype.publish = function(gyp, argv, done) {
var self = this;
rebuild(gyp, argv, function(err) {
if (err) return done(err);
createTarball(gyp, argv, function(err) {
if (err) return done(err);
var opts = {
ACL: config.acl,
Body: fs.createReadStream(self.state.staged_tarball),
Bucket: self.state.s3_bucket,
Key: self.state.s3_key
};
log.info('publish-maybe', 'Putting object');
self.s3.putObject(opts, function(err, resp) {
if (err) {
log.info('publish', 's3 putObject error: "' + err + '"');
return done(err);
}
if (resp) {
log.info('publish-maybe', 's3 putObject response: "' + JSON.stringify(resp) + '"');
}
console.log('[' + self.state.name + '] published to ' + self.state.hosted_tarball);
return done();
});
});
});
};
PublishMaybe.prototype.prepublish = function(done) {
var self = this;
self.canPublish(function(err) {
if (err) return done(err);
self.shouldPublish(done);
});
};
PublishMaybe.prototype.canPublish = function(fn) {
if (!AWS) {
return fn(new Error('aws-sdk not installed. run `npm install -g aws-sdk` and try again.'));
}
fn();
};
PublishMaybe.prototype.shouldPublish = function(fn) {
var opts = {
Bucket: this.state.s3_bucket,
Key: this.state.s3_key
};
log.info('publish-maybe', 'Checking for existing binary on S3 ' + JSON.stringify(opts));
this.s3.headObject(opts, function(err) {
if (err) {
if (err.code == 'NotFound') {
log.info('publish-maybe', 'OK binary does not exist so we should publish');
return fn(null, true);
}
log.error('publish-maybe', 'Unexpected error: ' + err.stack);
return fn(err);
}
log.info('publish-maybe', 'Binary already exists so we do not need to publish');
return fn(null, false);
});
};
module.exports = PublishMaybe;
module.exports.usage = 'Publishes pre-built binary if one does not exist (requires aws-sdk)';


@@ -1,77 +0,0 @@
"use strict";
module.exports = exports = publish;
exports.usage = 'Publishes pre-built binary (requires aws-sdk)';
var fs = require('fs');
var path = require('path');
var log = require('npmlog');
var versioning = require('./util/versioning.js');
var s3_setup = require('./util/s3_setup.js');
var existsAsync = fs.exists || path.exists;
var url = require('url');
var config = require('rc')("node_pre_gyp",{acl:"public-read"});
function publish(gyp, argv, callback) {
var AWS = require("aws-sdk");
var package_json = JSON.parse(fs.readFileSync('./package.json'));
var opts = versioning.evaluate(package_json, gyp.opts);
var tarball = opts.staged_tarball;
existsAsync(tarball,function(found) {
if (!found) {
return callback(new Error("Cannot publish because " + tarball + " missing: run `node-pre-gyp package` first"));
}
log.info('publish', 'Detecting s3 credentials');
s3_setup.detect(opts.hosted_path,config);
var key_name = url.resolve(config.prefix,opts.package_name);
log.info('publish', 'Authenticating with s3');
AWS.config.update(config);
var s3 = new AWS.S3();
var s3_opts = { Bucket: config.bucket,
Key: key_name
};
var remote_package = 'https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key;
log.info('publish', 'Checking for existing binary at ' + remote_package);
s3.headObject(s3_opts, function(err, meta){
if (meta) log.info('publish', JSON.stringify(meta));
if (err && err.code == 'NotFound') {
// we are safe to publish because
// the object does not already exist
log.info('publish', 'Preparing to put object');
var s3_put = new AWS.S3();
var s3_put_opts = { ACL: config.acl,
Body: fs.createReadStream(tarball),
Bucket: config.bucket,
Key: key_name
};
log.info('publish', 'Putting object');
try {
s3_put.putObject(s3_put_opts, function(err, resp){
log.info('publish', 'returned from putting object');
if(err) {
log.info('publish', 's3 putObject error: "' + err + '"');
return callback(err);
}
if (resp) log.info('publish', 's3 putObject response: "' + JSON.stringify(resp) + '"');
log.info('publish', 'successfully put object');
console.log('['+package_json.name+'] published to ' + remote_package);
return callback();
});
} catch (err) {
log.info('publish', 's3 putObject error: "' + err + '"');
return callback(err);
}
} else if (err) {
log.info('publish', 's3 headObject error: "' + err + '"');
return callback(err);
} else {
log.error('publish','Cannot publish over existing version');
log.error('publish',"Update the 'version' field in package.json and try again");
log.error('publish','If the previous version was published in error see:');
log.error('publish','\t node-pre-gyp unpublish');
return callback(new Error('Failed publishing to ' + remote_package));
}
});
});
}


@@ -1,13 +0,0 @@
"use strict";
module.exports = exports = rebuild;
exports.usage = 'Runs "clean" and "build" at once';
function rebuild (gyp, argv, callback) {
gyp.todo.unshift(
{ name: 'clean', args: [] },
{ name: 'build', args: ['rebuild'] }
);
process.nextTick(callback);
}


@@ -1,13 +0,0 @@
"use strict";
module.exports = exports = rebuild;
exports.usage = 'Runs "clean" and "install" at once';
function rebuild (gyp, argv, callback) {
gyp.todo.unshift(
{ name: 'clean', args: [] },
{ name: 'install', args: [] }
);
process.nextTick(callback);
}


@@ -1,31 +0,0 @@
"use strict";
module.exports = exports = reveal;
exports.usage = 'Reveals data on the versioned binary';
var fs = require('fs');
var versioning = require('./util/versioning.js');
function unix_paths(key, val) {
return val && val.replace ? val.replace(/\\/g, '/') : val;
}
function reveal(gyp, argv, callback) {
var package_json = JSON.parse(fs.readFileSync('./package.json'));
var opts = versioning.evaluate(package_json, gyp.opts);
var hit = false;
// if a second arg is passed look to see
// if it is a known option
//console.log(JSON.stringify(gyp.opts,null,1))
var remain = gyp.opts.argv.remain.pop();
if (remain && opts.hasOwnProperty(remain)) {
console.log(opts[remain].replace(/\\/g, '/'));
hit = true;
}
// otherwise return all options as json
if (!hit) {
console.log(JSON.stringify(opts,unix_paths,2));
}
return callback();
}


@@ -1,73 +0,0 @@
"use strict";
module.exports = exports = testbinary;
exports.usage = 'Tests that the binary.node can be required';
var fs = require('fs');
var path = require('path');
var log = require('npmlog');
var cp = require('child_process');
var versioning = require('./util/versioning.js');
var path = require('path');
function testbinary(gyp, argv, callback) {
var args = [];
var options = {};
var shell_cmd = process.execPath;
var package_json = JSON.parse(fs.readFileSync('./package.json'));
var opts = versioning.evaluate(package_json, gyp.opts);
// ensure on windows that / are used for require path
var binary_module = opts.module.replace(/\\/g, '/');
var nw = (opts.runtime && opts.runtime === 'node-webkit');
if (nw) {
options.timeout = 5000;
if (process.platform === 'darwin') {
shell_cmd = 'node-webkit';
} else if (process.platform === 'win32') {
shell_cmd = 'nw.exe';
} else {
shell_cmd = 'nw';
}
var modulePath = path.resolve(binary_module);
var appDir = path.join(__dirname, 'util', 'nw-pre-gyp');
args.push(appDir);
args.push(modulePath);
log.info("validate","Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'");
cp.execFile(shell_cmd, args, options, function(err, stdout, stderr) {
// check for normal timeout for node-webkit
if (err) {
if (err.killed === true && err.signal && err.signal.indexOf('SIG') > -1) {
return callback();
}
var stderrLog = stderr.toString();
log.info('stderr', stderrLog);
if( /^\s*Xlib:\s*extension\s*"RANDR"\s*missing\s*on\s*display\s*":\d+\.\d+"\.\s*$/.test(stderrLog) ){
log.info('RANDR', 'stderr contains only RANDR error, ignored');
return callback();
}
return callback(err);
}
return callback();
});
return;
}
if ((process.arch != opts.target_arch) ||
(process.platform != opts.target_platform)) {
var msg = "skipping validation since host platform/arch (";
msg += process.platform+'/'+process.arch+")";
msg += " does not match target (";
msg += opts.target_platform+'/'+opts.target_arch+")";
log.info('validate', msg);
return callback();
}
args.push('--eval');
args.push("'require(\\'" + binary_module.replace(/\'/g, '\\\'') +"\\')'");
log.info("validate","Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'");
cp.execFile(shell_cmd, args, options, function(err, stdout, stderr) {
if (err) {
return callback(err, { stdout:stdout, stderr:stderr});
}
return callback();
});
}


@@ -1,49 +0,0 @@
"use strict";
module.exports = exports = testpackage;
exports.usage = 'Tests that the staged package is valid';
var fs = require('fs');
var path = require('path');
var log = require('npmlog');
var existsAsync = fs.exists || path.exists;
var versioning = require('./util/versioning.js');
var testbinary = require('./testbinary.js');
var read = require('fs').createReadStream;
var zlib = require('zlib');
function testpackage(gyp, argv, callback) {
var package_json = JSON.parse(fs.readFileSync('./package.json'));
var opts = versioning.evaluate(package_json, gyp.opts);
var tarball = opts.staged_tarball;
existsAsync(tarball, function(found) {
if (!found) {
return callback(new Error("Cannot test package because " + tarball + " missing: run `node-pre-gyp package` first"));
}
var to = opts.module_path;
var gunzip = zlib.createGunzip();
var extracter = require('tar').Extract({ path: to, strip: 1 });
function filter_func(entry) {
// ensure directories are +x
// https://github.com/mapnik/node-mapnik/issues/262
entry.props.mode |= (entry.props.mode >>> 2) & parseInt('0111',8);
log.info('install','unpacking ' + entry.path);
}
gunzip.on('error', callback);
extracter.on('error', callback);
extracter.on('entry', filter_func);
extracter.on('end', function(err) {
if (err) return callback(err);
testbinary(gyp,argv,function(err) {
if (err) {
return callback(err);
} else {
console.log('['+package_json.name+'] Package appears valid');
return callback();
}
});
});
read(tarball).pipe(gunzip).pipe(extracter);
});
}


@@ -1,41 +0,0 @@
"use strict";
module.exports = exports = unpublish;
exports.usage = 'Unpublishes pre-built binary (requires aws-sdk)';
var fs = require('fs');
var log = require('npmlog');
var versioning = require('./util/versioning.js');
var s3_setup = require('./util/s3_setup.js');
var url = require('url');
var config = require('rc')("node_pre_gyp",{acl:"public-read"});
function unpublish(gyp, argv, callback) {
var AWS = require("aws-sdk");
var package_json = JSON.parse(fs.readFileSync('./package.json'));
var opts = versioning.evaluate(package_json, gyp.opts);
s3_setup.detect(opts.hosted_path,config);
AWS.config.update(config);
var key_name = url.resolve(config.prefix,opts.package_name);
var s3 = new AWS.S3();
var s3_opts = { Bucket: config.bucket,
Key: key_name
};
s3.headObject(s3_opts, function(err, meta) {
if (err && err.code == 'NotFound') {
console.log('['+package_json.name+'] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
return callback();
} else if(err) {
return callback(err);
} else {
log.info('unpublish', JSON.stringify(meta));
s3.deleteObject(s3_opts, function(err, resp) {
if (err) return callback(err);
log.info(JSON.stringify(resp));
console.log('['+package_json.name+'] Success: removed https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
return callback();
});
}
});
}


@@ -1,366 +0,0 @@
{
"0.8.0": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.1": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.2": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.3": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.4": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.5": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.6": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.7": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.8": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.9": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.10": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.11": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.12": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.13": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.14": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.15": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.16": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.17": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.18": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.19": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.20": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.21": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.22": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.23": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.24": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.25": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.26": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.27": {
"node_abi": 1,
"v8": "3.11"
},
"0.8.28": {
"node_abi": 1,
"v8": "3.11"
},
"0.10.0": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.1": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.2": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.3": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.4": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.5": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.6": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.7": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.8": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.9": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.10": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.11": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.12": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.13": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.14": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.15": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.16": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.17": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.18": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.19": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.20": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.21": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.22": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.23": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.24": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.25": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.26": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.27": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.28": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.29": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.30": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.31": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.32": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.33": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.34": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.35": {
"node_abi": 11,
"v8": "3.14"
},
"0.10.36": {
"node_abi": 11,
"v8": "3.14"
},
"0.11.0": {
"node_abi": 12,
"v8": "3.17"
},
"0.11.1": {
"node_abi": 12,
"v8": "3.18"
},
"0.11.2": {
"node_abi": 12,
"v8": "3.19"
},
"0.11.3": {
"node_abi": 12,
"v8": "3.19"
},
"0.11.4": {
"node_abi": 12,
"v8": "3.20"
},
"0.11.5": {
"node_abi": 12,
"v8": "3.20"
},
"0.11.6": {
"node_abi": 12,
"v8": "3.20"
},
"0.11.7": {
"node_abi": 12,
"v8": "3.20"
},
"0.11.8": {
"node_abi": 13,
"v8": "3.21"
},
"0.11.9": {
"node_abi": 13,
"v8": "3.22"
},
"0.11.10": {
"node_abi": 13,
"v8": "3.22"
},
"0.11.11": {
"node_abi": 14,
"v8": "3.22"
},
"0.11.12": {
"node_abi": 14,
"v8": "3.22"
},
"0.11.13": {
"node_abi": 14,
"v8": "3.25"
},
"0.11.14": {
"node_abi": 14,
"v8": "3.26"
},
"0.11.15": {
"node_abi": 14,
"v8": "3.28"
},
"0.11.16": {
"node_abi": 14,
"v8": "3.28"
},
"0.12.0": {
"node_abi": 14,
"v8": "3.28"
},
"1.0.0": {
"node_abi": 42,
"v8": "3.31"
},
"1.0.1": {
"node_abi": 42,
"v8": "3.31"
},
"1.0.2": {
"node_abi": 42,
"v8": "3.31"
},
"1.0.3": {
"node_abi": 42,
"v8": "4.1"
},
"1.0.4": {
"node_abi": 42,
"v8": "4.1"
},
"1.1.0": {
"node_abi": 43,
"v8": "4.1"
},
"1.2.0": {
"node_abi": 43,
"v8": "4.1"
}
}


@@ -1,87 +0,0 @@
"use strict";
module.exports = exports;
var fs = require('fs');
var path = require('path');
var win = process.platform == 'win32';
var existsSync = fs.existsSync || path.existsSync;
var cp = require('child_process');
// try to build up the complete path to node-gyp
/* priority:
- node-gyp on ENV:npm_config_node_gyp (https://github.com/npm/npm/pull/4887)
- node-gyp on NODE_PATH
- node-gyp inside npm on NODE_PATH (ignore on iojs)
- node-gyp inside npm beside node exe
*/
function which_node_gyp() {
var node_gyp_bin;
if (process.env.npm_config_node_gyp) {
try {
node_gyp_bin = process.env.npm_config_node_gyp;
if (existsSync(node_gyp_bin)) {
return node_gyp_bin;
}
} catch (err) { }
}
try {
var node_gyp_main = require.resolve('node-gyp');
node_gyp_bin = path.join(path.dirname(
path.dirname(node_gyp_main)),
'bin/node-gyp.js');
if (existsSync(node_gyp_bin)) {
return node_gyp_bin;
}
} catch (err) { }
if (process.execPath.indexOf('iojs') === -1) {
try {
var npm_main = require.resolve('npm');
node_gyp_bin = path.join(path.dirname(
path.dirname(npm_main)),
'node_modules/node-gyp/bin/node-gyp.js');
if (existsSync(node_gyp_bin)) {
return node_gyp_bin;
}
} catch (err) { }
}
var npm_base = path.join(path.dirname(
path.dirname(process.execPath)),
'lib/node_modules/npm/');
node_gyp_bin = path.join(npm_base, 'node_modules/node-gyp/bin/node-gyp.js');
if (existsSync(node_gyp_bin)) {
return node_gyp_bin;
}
}
module.exports.run_gyp = function(args,opts,callback) {
var shell_cmd = '';
var cmd_args = [];
if (opts.runtime && opts.runtime == 'node-webkit') {
shell_cmd = 'nw-gyp';
if (win) shell_cmd += '.cmd';
} else {
var node_gyp_path = which_node_gyp();
if (node_gyp_path) {
shell_cmd = process.execPath;
cmd_args.push(node_gyp_path);
} else {
shell_cmd = 'node-gyp';
if (win) shell_cmd += '.cmd';
}
}
var final_args = cmd_args.concat(args);
var cmd = cp.spawn(shell_cmd, final_args, {cwd: undefined, env: process.env, stdio: [ 0, 1, 2]});
cmd.on('error', function (err) {
if (err) {
return callback(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + err + ")"));
}
callback(null,opts);
});
cmd.on('close', function (code) {
if (code && code !== 0) {
return callback(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + code + ")"));
}
callback(null,opts);
});
};


@@ -1,94 +0,0 @@
"use strict";
module.exports = exports = handle_gyp_opts;
var fs = require('fs');
var versioning = require('./versioning.js');
/*
Here we gather node-pre-gyp generated options (from versioning) and pass them along to node-gyp.
We massage the args and options slightly to account for differences in what commands mean between
node-pre-gyp and node-gyp (e.g. see the difference between "build" and "rebuild" below)
Keep in mind: the values inside `argv` and `gyp.opts` below are different depending on whether
node-pre-gyp is called directly, or if it is called in a `run-script` phase of npm.
We also try to preserve any command line options that might have been passed to npm or node-pre-gyp.
But this is fairly difficult without passing way too much through. For example `gyp.opts` contains all
of process.env, and npm pushes a lot of variables into process.env which node-pre-gyp inherits. So we have
to be very selective about what we pass through.
For example:
`npm install --build-from-source` will give:
argv == [ 'rebuild' ]
gyp.opts.argv == { remain: [ 'install' ],
cooked: [ 'install', '--fallback-to-build' ],
original: [ 'install', '--fallback-to-build' ] }
`./bin/node-pre-gyp build` will give:
argv == []
gyp.opts.argv == { remain: [ 'build' ],
cooked: [ 'build' ],
original: [ '-C', 'test/app1', 'build' ] }
*/
// select set of node-pre-gyp versioning info
// to share with node-gyp
var share_with_node_gyp = [
'module',
'module_name',
'module_path',
];
function handle_gyp_opts(gyp, argv, callback) {
// Collect node-pre-gyp specific variables to pass to node-gyp
var node_pre_gyp_options = [];
// generate custom node-pre-gyp versioning info
var opts = versioning.evaluate(JSON.parse(fs.readFileSync('./package.json')), gyp.opts);
share_with_node_gyp.forEach(function(key) {
var val = opts[key];
if (val) {
node_pre_gyp_options.push('--' + key + '=' + val);
} else {
return callback(new Error("Option " + key + " required but not found by node-pre-gyp"));
}
});
// Collect options that follow the special -- which disables nopt parsing
var unparsed_options = [];
var double_hyphen_found = false;
gyp.opts.argv.original.forEach(function(opt) {
if (double_hyphen_found) {
unparsed_options.push(opt);
}
if (opt == '--') {
double_hyphen_found = true;
}
});
// We try to respect and pass through remaining command
// line options (like --foo=bar) to node-gyp
var cooked = gyp.opts.argv.cooked;
var node_gyp_options = [];
cooked.forEach(function(value) {
if (value.length > 2 && value.slice(0,2) == '--') {
var key = value.slice(2);
var val = cooked[cooked.indexOf(value)+1];
if (val && val.indexOf('--') === -1) { // handle '--foo=bar' or ['--foo','bar']
node_gyp_options.push('--' + key + '=' + val);
} else { // pass through --foo
node_gyp_options.push(value);
}
}
});
var result = {'opts':opts,'gyp':node_gyp_options,'pre':node_pre_gyp_options,'unparsed':unparsed_options};
return callback(null,result);
}
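// Shape of the result handed to the callback (values illustrative):
//   { opts:     { /* versioning info from versioning.evaluate() */ },
//     gyp:      ['--debug=true'],       // pass-through --foo=bar CLI options
//     pre:      ['--module=...', '--module_name=...', '--module_path=...'],
//     unparsed: ['-Dvariable=value'] }  // everything after a literal `--`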


@@ -1,26 +0,0 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<title>Node-webkit-based module test</title>
<script>
function nwModuleTest(){
var util = require('util');
var moduleFolder = require('nw.gui').App.argv[0];
try {
require(moduleFolder);
} catch(e) {
if( process.platform !== 'win32' ){
util.log('nw-pre-gyp error:');
util.log(e.stack);
}
process.exit(1);
}
process.exit(0);
}
</script>
</head>
<body onload="nwModuleTest()">
<h1>Node-webkit-based module test</h1>
</body>
</html>


@@ -1,9 +0,0 @@
{
"main": "index.html",
"name": "nw-pre-gyp-module-test",
"description": "Node-webkit-based module test.",
"version": "0.0.1",
"window": {
"show": false
}
}


@@ -1,27 +0,0 @@
"use strict";
module.exports = exports;
var url = require('url');
// use a regex literal so the dots stay escaped ('\.' inside a string literal collapses to a plain '.')
var URI_REGEX = /^(.*)\.(s3(?:-.*)?)\.amazonaws\.com$/;
module.exports.detect = function(to,config) {
var uri = url.parse(to);
var hostname_matches = uri.hostname.match(URI_REGEX);
config.prefix = (!uri.pathname || uri.pathname == '/') ? '' : uri.pathname.replace('/','');
if(!hostname_matches) {
return;
}
if (!config.bucket) {
config.bucket = hostname_matches[1];
}
if (!config.region) {
var s3_domain = hostname_matches[2];
if (s3_domain.slice(0,3) == 's3-' &&
s3_domain.length >= 3) {
// it appears the region is explicit in the url
config.region = s3_domain.replace('s3-','');
}
}
};
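// Usage sketch (hypothetical bucket and host):
//   var config = { acl: 'public-read' };
//   module.exports.detect('https://my-bucket.s3-us-west-2.amazonaws.com/some/prefix', config);
//   // config.prefix => 'some/prefix'
//   // config.bucket => 'my-bucket'    (taken from the hostname)
//   // config.region => 'us-west-2'    (parsed from the s3-<region> domain)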


@@ -1,276 +0,0 @@
"use strict";
module.exports = exports;
var path = require('path');
var semver = require('semver');
var url = require('url');
var abi_crosswalk;
// This is used for unit testing to provide a fake
// ABI crosswalk that emulates one that is not updated
// for the current version
if (process.env.NODE_PRE_GYP_ABI_CROSSWALK) {
abi_crosswalk = require(process.env.NODE_PRE_GYP_ABI_CROSSWALK);
} else {
abi_crosswalk = require('./abi_crosswalk.json');
}
function get_node_webkit_abi(runtime, target_version) {
if (!runtime) {
throw new Error("get_node_webkit_abi requires valid runtime arg");
}
if (typeof target_version === 'undefined') {
// erroneous CLI call
throw new Error("Empty target version is not supported if node-webkit is the target.");
}
return runtime + '-v' + target_version;
}
module.exports.get_node_webkit_abi = get_node_webkit_abi;
function get_node_abi(runtime, versions) {
if (!runtime) {
throw new Error("get_node_abi requires valid runtime arg");
}
if (!versions) {
throw new Error("get_node_abi requires valid process.versions object");
}
var sem_ver = semver.parse(versions.node);
if (sem_ver.major === 0 && sem_ver.minor % 2) { // odd series
// https://github.com/mapbox/node-pre-gyp/issues/124
return runtime+'-v'+versions.node;
} else {
// process.versions.modules added in >= v0.10.4 and v0.11.7
// https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e
return versions.modules ? runtime+'-v' + (+versions.modules) :
'v8-' + versions.v8.split('.').slice(0,2).join('.');
}
}
module.exports.get_node_abi = get_node_abi;
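// Examples of the resulting ABI tags (version values are illustrative):
//   get_node_abi('node', { node: '0.10.33', modules: '11', v8: '3.14.5.9' })
//   // => 'node-v11'      (stable series with process.versions.modules)
//   get_node_abi('node', { node: '0.8.28', v8: '3.11.10.26' })
//   // => 'v8-3.11'       (pre-0.10.4: falls back to the v8 major.minor)
//   get_node_abi('node', { node: '0.11.8', modules: '13', v8: '3.21.18.3' })
//   // => 'node-v0.11.8'  (odd/unstable series is keyed by the exact version)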
function get_runtime_abi(runtime, target_version) {
if (!runtime) {
throw new Error("get_runtime_abi requires valid runtime arg");
}
if (runtime === 'node-webkit') {
return get_node_webkit_abi(runtime, target_version || process.versions['node-webkit']);
} else {
if (runtime != 'node') {
throw new Error("Unknown Runtime: '" + runtime + "'");
}
if (!target_version) {
return get_node_abi(runtime,process.versions);
} else {
var cross_obj;
// abi_crosswalk generated with ./scripts/abi_crosswalk.js
if (abi_crosswalk[target_version]) {
cross_obj = abi_crosswalk[target_version];
} else {
var target_parts = target_version.split('.').map(function(i) { return +i; });
if (target_parts.length != 3) { // parse failed
throw new Error("Unknown target version: " + target_version);
}
/*
The below code tries to infer the last known ABI compatible version
that we have recorded in the abi_crosswalk.json when an exact match
is not possible. The reasons for this to exist are complicated:
- We support passing --target to be able to allow developers to package binaries for versions of node
that are not the same one as they are running. This might also be used in combination with the
--target_arch or --target_platform flags to also package binaries for alternative platforms
- When --target is passed we can't therefore determine the ABI (process.versions.modules) from the node
version that is running in memory
- So, therefore node-pre-gyp keeps an "ABI crosswalk" (lib/util/abi_crosswalk.json) to be able to look
this info up for all versions
- But we cannot easily predict what the future ABI will be for released versions
- And node-pre-gyp needs to be a `bundledDependency` in apps that depend on it in order to work correctly
by being fully available at install time.
- So, the speed of node releases and the bundled nature of node-pre-gyp mean that a new node-pre-gyp release
needs to happen for every node.js/io.js/node-webkit/nw.js/atom-shell/etc release that might come online if
you want the `--target` flag to keep working for the latest version
- Which is impractical ^^
- Hence the code below guesses at the future ABI to make the need to update node-pre-gyp less pressing.
In practice then you can have a dependency of your app like `node-sqlite3` that bundles a `node-pre-gyp` that
only knows about node v0.10.33 in the `abi_crosswalk.json` but targets node v0.10.34 (which is assumed to be
ABI compatible with v0.10.33).
TODO: use semver module instead of custom version parsing
*/
var major = target_parts[0];
var minor = target_parts[1];
var patch = target_parts[2];
// io.js: yeah if node.js ever releases 1.x this will break
// but that is unlikely to happen: https://github.com/iojs/io.js/pull/253#issuecomment-69432616
if (major === 1) {
// look for last release that is the same major version
// e.g. we assume io.js 1.x is ABI compatible with >= 1.0.0
while (true) {
if (minor > 0) --minor;
if (patch > 0) --patch;
var new_iojs_target = '' + major + '.' + minor + '.' + patch;
if (abi_crosswalk[new_iojs_target]) {
cross_obj = abi_crosswalk[new_iojs_target];
console.log('Warning: node-pre-gyp could not find exact match for ' + target_version);
console.log('Warning: but node-pre-gyp successfully chose ' + new_iojs_target + ' as an ABI-compatible target');
break;
}
if (minor === 0 && patch === 0) {
break;
}
}
} else if (major === 0) { // node.js
if (target_parts[1] % 2 === 0) { // for stable/even node.js series
// look for the last release that is the same minor release
// e.g. we assume node 0.10.x is ABI compatible with >= 0.10.0
while (--patch > 0) {
var new_node_target = '' + major + '.' + minor + '.' + patch;
if (abi_crosswalk[new_node_target]) {
cross_obj = abi_crosswalk[new_node_target];
console.log('Warning: node-pre-gyp could not find exact match for ' + target_version);
console.log('Warning: but node-pre-gyp successfully chose ' + new_node_target + ' as an ABI-compatible target');
break;
}
}
}
}
}
if (!cross_obj) {
throw new Error("Unsupported target version: " + target_version);
}
// emulate process.versions
var versions_obj = {
node: target_version,
v8: cross_obj.v8+'.0',
// abi_crosswalk uses 1 for node versions lacking process.versions.modules
// process.versions.modules added in >= v0.10.4 and v0.11.7
modules: cross_obj.node_abi > 1 ? cross_obj.node_abi : undefined
};
return get_node_abi(runtime, versions_obj);
}
}
}
module.exports.get_runtime_abi = get_runtime_abi;
var required_parameters = [
'module_name',
'module_path',
'host'
];
function validate_config(package_json) {
var msg = package_json.name + ' package.json is not node-pre-gyp ready:\n';
var missing = [];
if (!package_json.main) {
missing.push('main');
}
if (!package_json.version) {
missing.push('version');
}
if (!package_json.name) {
missing.push('name');
}
if (!package_json.binary) {
missing.push('binary');
}
var o = package_json.binary;
required_parameters.forEach(function(p) {
if (missing.indexOf('binary') > -1) {
missing.splice(missing.indexOf('binary'), 1); // Array#pop takes no argument; remove 'binary' explicitly
}
if (!o || o[p] === undefined) {
missing.push('binary.' + p);
}
});
if (missing.length >= 1) {
throw new Error(msg+"package.json must declare these properties: \n" + missing.join('\n'));
}
if (o) {
// enforce https over http
var protocol = url.parse(o.host).protocol;
if (protocol === 'http:') {
throw new Error("'host' protocol ("+protocol+") is invalid - only 'https:' is accepted");
}
}
}
module.exports.validate_config = validate_config;
function eval_template(template,opts) {
Object.keys(opts).forEach(function(key) {
var pattern = '{'+key+'}';
while (template.indexOf(pattern) > -1) {
template = template.replace(pattern,opts[key]);
}
});
return template;
}
// url.resolve needs single trailing slash
// to behave correctly, otherwise a double slash
// may end up in the url which breaks requests
// and a lacking slash may not lead to proper joining
function fix_slashes(pathname) {
if (pathname.slice(-1) != '/') {
return pathname + '/';
}
return pathname;
}
// remove double slashes
// note: path.normalize will not work because
// it will convert forward to back slashes
function drop_double_slashes(pathname) {
return pathname.replace(/\/\//g,'/');
}
var default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
var default_remote_path = '';
module.exports.evaluate = function(package_json,options) {
options = options || {};
validate_config(package_json);
var v = package_json.version;
var module_version = semver.parse(v);
var runtime = options.runtime || (process.versions['node-webkit'] ? 'node-webkit' : 'node');
var opts = {
name: package_json.name,
configuration: Boolean(options.debug) ? 'Debug' : 'Release',
debug: options.debug,
module_name: package_json.binary.module_name,
version: module_version.version,
prerelease: module_version.prerelease.length ? module_version.prerelease.join('.') : '',
build: module_version.build.length ? module_version.build.join('.') : '',
major: module_version.major,
minor: module_version.minor,
patch: module_version.patch,
runtime: runtime,
node_abi: get_runtime_abi(runtime,options.target),
target: options.target || '',
platform: options.target_platform || process.platform,
target_platform: options.target_platform || process.platform,
arch: options.target_arch || process.arch,
target_arch: options.target_arch || process.arch,
module_main: package_json.main,
toolset : options.toolset || '' // address https://github.com/mapbox/node-pre-gyp/issues/119
};
opts.host = fix_slashes(eval_template(package_json.binary.host,opts));
opts.module_path = eval_template(package_json.binary.module_path,opts);
// now we resolve the module_path to ensure it is absolute so that binding.gyp variables work predictably
if (options.module_root) {
// resolve relative to known module root: works for pre-binding require
opts.module_path = path.join(options.module_root,opts.module_path);
} else {
// resolve relative to current working directory: works for node-pre-gyp commands
opts.module_path = path.resolve(opts.module_path);
}
opts.module = path.join(opts.module_path,opts.module_name + '.node');
opts.remote_path = package_json.binary.remote_path ? drop_double_slashes(fix_slashes(eval_template(package_json.binary.remote_path,opts))) : default_remote_path;
var package_name = package_json.binary.package_name ? package_json.binary.package_name : default_package_name;
opts.package_name = eval_template(package_name,opts);
opts.staged_tarball = path.join('build/stage',opts.remote_path,opts.package_name);
opts.hosted_path = url.resolve(opts.host,opts.remote_path);
opts.hosted_tarball = url.resolve(opts.hosted_path,opts.package_name);
return opts;
};
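// Worked example (all values assumed for illustration): for a package at
// version 3.0.0 whose package.json declares
//   "binary": { "module_name": "node_sqlite3",
//               "module_path": "./lib/binding/{node_abi}-{platform}-{arch}",
//               "host": "https://node-sqlite3.s3.amazonaws.com" }
// evaluate() on node 0.10.x / linux / x64 yields roughly:
//   opts.node_abi       => 'node-v11'
//   opts.package_name   => 'node_sqlite3-v3.0.0-node-v11-linux-x64.tar.gz'
//   opts.hosted_tarball => 'https://node-sqlite3.s3.amazonaws.com/node_sqlite3-v3.0.0-node-v11-linux-x64.tar.gz'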


@@ -1,8 +0,0 @@
language: node_js
node_js:
- "0.8"
- "0.10"
- "0.12"
- "iojs"
before_install:
- npm install -g npm@~1.4.6


@@ -1,21 +0,0 @@
Copyright 2010 James Halliday (mail@substack.net)
This project is free software released under the MIT/X11 license:
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.


@@ -1,33 +0,0 @@
#!/usr/bin/env node
var mkdirp = require('../');
var minimist = require('minimist');
var fs = require('fs');
var argv = minimist(process.argv.slice(2), {
alias: { m: 'mode', h: 'help' },
string: [ 'mode' ]
});
if (argv.help) {
fs.createReadStream(__dirname + '/usage.txt').pipe(process.stdout);
return;
}
var paths = argv._.slice();
var mode = argv.mode ? parseInt(argv.mode, 8) : undefined;
(function next () {
if (paths.length === 0) return;
var p = paths.shift();
if (mode === undefined) mkdirp(p, cb)
else mkdirp(p, mode, cb)
function cb (err) {
if (err) {
console.error(err.message);
process.exit(1);
}
else next();
}
})();


@@ -1,12 +0,0 @@
usage: mkdirp [DIR1,DIR2..] {OPTIONS}
Create each supplied directory including any necessary parent directories that
don't yet exist.
If the directory already exists, do nothing.
OPTIONS are:
-m, --mode If a directory needs to be created, set the mode as an octal
permission string.


@@ -1,6 +0,0 @@
var mkdirp = require('mkdirp');
mkdirp('/tmp/foo/bar/baz', function (err) {
if (err) console.error(err)
else console.log('pow!')
});


@@ -1,98 +0,0 @@
var path = require('path');
var fs = require('fs');
var _0777 = parseInt('0777', 8);
module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP;
function mkdirP (p, opts, f, made) {
if (typeof opts === 'function') {
f = opts;
opts = {};
}
else if (!opts || typeof opts !== 'object') {
opts = { mode: opts };
}
var mode = opts.mode;
var xfs = opts.fs || fs;
if (mode === undefined) {
mode = _0777 & (~process.umask());
}
if (!made) made = null;
var cb = f || function () {};
p = path.resolve(p);
xfs.mkdir(p, mode, function (er) {
if (!er) {
made = made || p;
return cb(null, made);
}
switch (er.code) {
case 'ENOENT':
mkdirP(path.dirname(p), opts, function (er, made) {
if (er) cb(er, made);
else mkdirP(p, opts, cb, made);
});
break;
// In the case of any other error, just see if there's a dir
// there already. If so, then hooray! If not, then something
// is borked.
default:
xfs.stat(p, function (er2, stat) {
// if the stat fails, then that's super weird.
// let the original error be the failure reason.
if (er2 || !stat.isDirectory()) cb(er, made)
else cb(null, made);
});
break;
}
});
}
mkdirP.sync = function sync (p, opts, made) {
if (!opts || typeof opts !== 'object') {
opts = { mode: opts };
}
var mode = opts.mode;
var xfs = opts.fs || fs;
if (mode === undefined) {
mode = _0777 & (~process.umask());
}
if (!made) made = null;
p = path.resolve(p);
try {
xfs.mkdirSync(p, mode);
made = made || p;
}
catch (err0) {
switch (err0.code) {
case 'ENOENT' :
made = sync(path.dirname(p), opts, made);
sync(p, opts, made);
break;
// In the case of any other error, just see if there's a dir
// there already. If so, then hooray! If not, then something
// is borked.
default:
var stat;
try {
stat = xfs.statSync(p);
}
catch (err1) {
throw err0;
}
if (!stat.isDirectory()) throw err0;
break;
}
}
return made;
};
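// Sync usage sketch: mkdirp.sync returns the outermost directory it actually
// created, or null when the whole path already existed.
//   var made = mkdirp.sync('/tmp/foo/bar/baz');
//   // first run (only /tmp exists): made === '/tmp/foo'
//   // second run:                   made === null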


@@ -1,4 +0,0 @@
language: node_js
node_js:
- "0.8"
- "0.10"


@@ -1,18 +0,0 @@
This software is released under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


@@ -1,2 +0,0 @@
var argv = require('../')(process.argv.slice(2));
console.dir(argv);


@@ -1,187 +0,0 @@
module.exports = function (args, opts) {
if (!opts) opts = {};
var flags = { bools : {}, strings : {} };
[].concat(opts['boolean']).filter(Boolean).forEach(function (key) {
flags.bools[key] = true;
});
[].concat(opts.string).filter(Boolean).forEach(function (key) {
flags.strings[key] = true;
});
var aliases = {};
Object.keys(opts.alias || {}).forEach(function (key) {
aliases[key] = [].concat(opts.alias[key]);
aliases[key].forEach(function (x) {
aliases[x] = [key].concat(aliases[key].filter(function (y) {
return x !== y;
}));
});
});
var defaults = opts['default'] || {};
var argv = { _ : [] };
Object.keys(flags.bools).forEach(function (key) {
setArg(key, defaults[key] === undefined ? false : defaults[key]);
});
var notFlags = [];
if (args.indexOf('--') !== -1) {
notFlags = args.slice(args.indexOf('--')+1);
args = args.slice(0, args.indexOf('--'));
}
function setArg (key, val) {
var value = !flags.strings[key] && isNumber(val)
? Number(val) : val
;
setKey(argv, key.split('.'), value);
(aliases[key] || []).forEach(function (x) {
setKey(argv, x.split('.'), value);
});
}
for (var i = 0; i < args.length; i++) {
var arg = args[i];
if (/^--.+=/.test(arg)) {
// Using [\s\S] instead of . because js doesn't support the
// 'dotall' regex modifier. See:
// http://stackoverflow.com/a/1068308/13216
var m = arg.match(/^--([^=]+)=([\s\S]*)$/);
setArg(m[1], m[2]);
}
else if (/^--no-.+/.test(arg)) {
var key = arg.match(/^--no-(.+)/)[1];
setArg(key, false);
}
else if (/^--.+/.test(arg)) {
var key = arg.match(/^--(.+)/)[1];
var next = args[i + 1];
if (next !== undefined && !/^-/.test(next)
&& !flags.bools[key]
&& (aliases[key] ? !flags.bools[aliases[key]] : true)) {
setArg(key, next);
i++;
}
else if (/^(true|false)$/.test(next)) {
setArg(key, next === 'true');
i++;
}
else {
setArg(key, flags.strings[key] ? '' : true);
}
}
else if (/^-[^-]+/.test(arg)) {
var letters = arg.slice(1,-1).split('');
var broken = false;
for (var j = 0; j < letters.length; j++) {
var next = arg.slice(j+2);
if (next === '-') {
setArg(letters[j], next)
continue;
}
if (/[A-Za-z]/.test(letters[j])
&& /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) {
setArg(letters[j], next);
broken = true;
break;
}
if (letters[j+1] && letters[j+1].match(/\W/)) {
setArg(letters[j], arg.slice(j+2));
broken = true;
break;
}
else {
setArg(letters[j], flags.strings[letters[j]] ? '' : true);
}
}
var key = arg.slice(-1)[0];
if (!broken && key !== '-') {
if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1])
&& !flags.bools[key]
&& (aliases[key] ? !flags.bools[aliases[key]] : true)) {
setArg(key, args[i+1]);
i++;
}
else if (args[i+1] && /true|false/.test(args[i+1])) {
setArg(key, args[i+1] === 'true');
i++;
}
else {
setArg(key, flags.strings[key] ? '' : true);
}
}
}
else {
argv._.push(
flags.strings['_'] || !isNumber(arg) ? arg : Number(arg)
);
}
}
Object.keys(defaults).forEach(function (key) {
if (!hasKey(argv, key.split('.'))) {
setKey(argv, key.split('.'), defaults[key]);
(aliases[key] || []).forEach(function (x) {
setKey(argv, x.split('.'), defaults[key]);
});
}
});
notFlags.forEach(function(key) {
argv._.push(key);
});
return argv;
};
function hasKey (obj, keys) {
var o = obj;
keys.slice(0,-1).forEach(function (key) {
o = (o[key] || {});
});
var key = keys[keys.length - 1];
return key in o;
}
function setKey (obj, keys, value) {
var o = obj;
keys.slice(0,-1).forEach(function (key) {
if (o[key] === undefined) o[key] = {};
o = o[key];
});
var key = keys[keys.length - 1];
if (o[key] === undefined || typeof o[key] === 'boolean') {
o[key] = value;
}
else if (Array.isArray(o[key])) {
o[key].push(value);
}
else {
o[key] = [ o[key], value ];
}
}
function isNumber (x) {
if (typeof x === 'number') return true;
if (/^0x[0-9a-f]+$/i.test(x)) return true;
return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x);
}
function longest (xs) {
return Math.max.apply(null, xs.map(function (x) { return x.length }));
}


@@ -1,67 +0,0 @@
{
"name": "minimist",
"version": "0.0.8",
"description": "parse argument options",
"main": "index.js",
"devDependencies": {
"tape": "~1.0.4",
"tap": "~0.4.0"
},
"scripts": {
"test": "tap test/*.js"
},
"testling": {
"files": "test/*.js",
"browsers": [
"ie/6..latest",
"ff/5",
"firefox/latest",
"chrome/10",
"chrome/latest",
"safari/5.1",
"safari/latest",
"opera/12"
]
},
"repository": {
"type": "git",
"url": "git://github.com/substack/minimist.git"
},
"homepage": "https://github.com/substack/minimist",
"keywords": [
"argv",
"getopt",
"parser",
"optimist"
],
"author": {
"name": "James Halliday",
"email": "mail@substack.net",
"url": "http://substack.net"
},
"license": "MIT",
"bugs": {
"url": "https://github.com/substack/minimist/issues"
},
"_id": "minimist@0.0.8",
"dist": {
"shasum": "857fcabfc3397d2625b8228262e86aa7a011b05d",
"tarball": "http://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz"
},
"_from": "minimist@0.0.8",
"_npmVersion": "1.4.3",
"_npmUser": {
"name": "substack",
"email": "mail@substack.net"
},
"maintainers": [
{
"name": "substack",
"email": "mail@substack.net"
}
],
"directories": {},
"_shasum": "857fcabfc3397d2625b8228262e86aa7a011b05d",
"_resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz",
"readme": "ERROR: No README data found!"
}


@@ -1,73 +0,0 @@
# minimist
parse argument options
This module is the guts of optimist's argument parser without all the
fanciful decoration.
[![browser support](https://ci.testling.com/substack/minimist.png)](http://ci.testling.com/substack/minimist)
[![build status](https://secure.travis-ci.org/substack/minimist.png)](http://travis-ci.org/substack/minimist)
# example
``` js
var argv = require('minimist')(process.argv.slice(2));
console.dir(argv);
```
```
$ node example/parse.js -a beep -b boop
{ _: [], a: 'beep', b: 'boop' }
```
```
$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz
{ _: [ 'foo', 'bar', 'baz' ],
x: 3,
y: 4,
n: 5,
a: true,
b: true,
c: true,
beep: 'boop' }
```
# methods
``` js
var parseArgs = require('minimist')
```
## var argv = parseArgs(args, opts={})
Return an argument object `argv` populated with the array arguments from `args`.
`argv._` contains all the arguments that didn't have an option associated with
them.
Numeric-looking arguments will be returned as numbers unless `opts.string` or
`opts.boolean` is set for that argument name.
Any arguments after `'--'` will not be parsed and will end up in `argv._`.
options can be:
* `opts.string` - a string or array of strings argument names to always treat as
strings
* `opts.boolean` - a string or array of strings to always treat as booleans
* `opts.alias` - an object mapping string names to strings or arrays of string
argument names to use as aliases
* `opts.default` - an object mapping string argument names to default values
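A quick sketch of how these options combine (flag values are illustrative):

``` js
var parseArgs = require('minimist');
var argv = parseArgs(['-x', '3', '--no-debug', '--greeting', 'hi'], {
    string: ['x'],             // keep '3' a string instead of Number(3)
    alias: { greeting: 'g' },  // populate both argv.greeting and argv.g
    'default': { port: 8080 }  // applied when the flag is absent
});
// { _: [], x: '3', debug: false, greeting: 'hi', g: 'hi', port: 8080 }
```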
# install
With [npm](https://npmjs.org) do:
```
npm install minimist
```
# license
MIT

Some files were not shown because too many files have changed in this diff Show More