first commit

developertrinidad08
2023-01-16 18:11:14 -03:00
commit 7e6cf29479
233 changed files with 26791 additions and 0 deletions

0 node_modules/streamers/README generated vendored Normal file

8 node_modules/streamers/index.js generated vendored Normal file

@@ -0,0 +1,8 @@
var bufferStreamModule = require("./lib/buffer_stream");
var proactiveReadStreamModule = require("./lib/proactive_read_stream");
exports.BufferReadStream = bufferStreamModule.BufferReadStream;
exports.BufferWriteStream = bufferStreamModule.BufferWriteStream;
exports.ProactiveReadStream = proactiveReadStreamModule.ProactiveReadStream;
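
The entry point just re-exports the three stream classes. For orientation, a minimal sketch of driving a BufferReadStream follows; it assumes the vendored package resolves as require("streamers") and reuses the strings from the test suite further down:

    var streamers = require("streamers");
    // emit a 9-byte source in 4-byte chunks; chunkSize and readDelay are the
    // options handled by BufferReadStream in lib/buffer_stream.js
    var read = new streamers.BufferReadStream(new Buffer("jibberish"), {chunkSize: 4});
    read.on("data", function(chunk){
        console.log(chunk.toString()); // "jibb", "eris", "h"
    });
    read.on("end", function(){
        console.log("done");
    });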

320 node_modules/streamers/lib/buffer_stream.js generated vendored Normal file

@@ -0,0 +1,320 @@
var stream = require("stream");
var util = require("util");
var _ = require("underscore");
var emptyBuffer = new Buffer(0);
function BufferReadStream(source, options){
stream.Stream.call(this);
options = _.extend({
readDelay: 0,
paused: false
}, options);
this._source = source;
this._start = 0;
this._readChunkSize = options.chunkSize || source.length;
this._readDelay = options.readDelay;
this.readable = true;
this.paused = options.paused;
this._read();
}
util.inherits(BufferReadStream, stream.Stream);
BufferReadStream.prototype.setEncoding = function(encoding){
var StringDecoder = require("string_decoder").StringDecoder;
this._decoder = new StringDecoder(encoding);
};
BufferReadStream.prototype.pause = function(){
this.paused = true;
};
BufferReadStream.prototype.resume = function(){
this.paused = false;
this._read();
};
BufferReadStream.prototype.destroy = function(){
this.readable = false;
clearTimeout(this._timeoutId);
};
BufferReadStream.prototype._read = function(){
var self = this;
function emitReadChunk(){
self._emitReadChunk();
}
var readDelay = this._readDelay;
if(readDelay !== 0){
this._timeoutId = setTimeout(emitReadChunk, readDelay);
}
else{
process.nextTick(emitReadChunk);
}
};
BufferReadStream.prototype._emitReadChunk = function(){
if(this.paused || !this.readable) return;
var chunkSize = Math.min(this._source.length - this._start, this._readChunkSize);
if(chunkSize === 0){
this.readable = false;
this.emit("end");
return;
}
var sourceEnd = this._start + chunkSize;
var chunk = this._source.slice(this._start, sourceEnd);
this._start = sourceEnd;
this._read();
if(this._decoder){
var string = this._decoder.write(chunk);
if(string.length){
this.emit("data", string);
}
}
else{
this.emit("data", chunk);
}
};
function BufferWriteStream(options){
stream.Stream.call(this);
options = _.extend({
onFull: onFull,
onEnd: function(){},
minBlockAllocSize: 0,
drainDelay:0
}, options);
this._onFull = options.onFull;
this._onEnd = options.onEnd;
this._onWrite = options.onWrite;
this._minBlockAllocSize = options.minBlockAllocSize;
this._maxBlockAllocSize = options.maxBlockAllocSize;
this._drainDelay = options.drainDelay;
this._buffer = new Buffer(options.minBlockAllocSize);
this._destination = this._buffer;
this._destinationPos = 0;
this._writeQueue = [];
this._pendingOnFull = false;
this._pendingQueueDrain = false;
this.writable = true;
this.bytesWritten = 0;
}
util.inherits(BufferWriteStream, stream.Stream);
BufferWriteStream.prototype.getBuffer = function(){
return this._buffer;
};
BufferWriteStream.prototype.write = function(data, encoding){
if(!this.writable){
throw new Error("stream is not writable");
}
if(!Buffer.isBuffer(data)){
data = new Buffer(data, encoding);
}
if(data.length){
this._writeQueue.push(data);
}
this._commit();
return this._writeQueue.length === 0;
};
BufferWriteStream.prototype._commit = function(){
var self = this;
var destination = this._destination;
var writeQueue = this._writeQueue;
var startDestinationPos = this._destinationPos;
while(writeQueue.length && destination.length){
var head = writeQueue[0];
var copySize = Math.min(destination.length, head.length);
head.copy(destination, 0, 0, copySize);
head = head.slice(copySize);
destination = destination.slice(copySize);
this.bytesWritten += copySize;
this._destinationPos += copySize;
if(head.length === 0){
writeQueue.shift();
}
else{
writeQueue[0] = head;
}
}
this._destination = destination;
var bytesCommitted = this._destinationPos - startDestinationPos;
if(bytesCommitted){
if(this._onWrite){
if(writeQueue.length){
this._pendingQueueDrain = true;
}
// By locking destination the buffer is frozen and the onWrite
// callback cannot miss any write commits
this._destination = emptyBuffer;
var consumer = this._onWrite;
this._onWrite = null;
consumer.call(this, function(nextCallback){
process.nextTick(function(){
self._destination = destination;
self._onWrite = nextCallback;
self._commit();
});
}, consumer);
return;
}
}
if(writeQueue.length){
this._pendingQueueDrain = true;
this._growBuffer();
}
else if(this._pendingQueueDrain){
this._pendingQueueDrain = false;
if(this._drainDelay !== 0){
setTimeout(function(){
self.emit("drain");
}, this._drainDelay);
}
else{
process.nextTick(function(){
self.emit("drain");
});
}
}
};
BufferWriteStream.prototype._growBuffer = function(){
var self = this;
var writeQueue = this._writeQueue;
var requestSize = this._minBlockAllocSize;
var maxBlockAllocSize = this._maxBlockAllocSize;
var add = (maxBlockAllocSize === undefined ? function(a, b){return a + b;} : function(a, b){return Math.min(a + b, maxBlockAllocSize);});
for(var i = 0, queueLength = writeQueue.length; i < queueLength; i++){
requestSize = add(requestSize, writeQueue[i].length);
}
// Prevent concurrent onFull callbacks
if(this._pendingOnFull){
return;
}
this._pendingOnFull = true;
this._onFull(this._buffer, requestSize, function(buffer, destination){
process.nextTick(function(){
self._pendingOnFull = false;
if(!destination){
if(self.writable){
self.emit("error", new Error("buffer is full"));
}
self.destroy();
return;
}
self._buffer = buffer;
self._destination = destination;
self._commit();
});
});
};
BufferWriteStream.prototype.end = function(data, encoding){
var self = this;
function _end(){
self.writable = false;
self._onEnd();
}
if(data){
if(this.write(data, encoding)){
_end();
}else{
self.writable = false;
this.once("drain", _end);
}
}
else{
_end();
}
};
BufferWriteStream.prototype.destroy = function(){
this.writable = false;
this._pendingQueueDrain = false;
this._writeQueue = [];
};
BufferWriteStream.prototype.consume = function(consume){
this._buffer = this._buffer.slice(consume);
this._destinationPos -= consume;
};
BufferWriteStream.prototype.getCommittedSlice = function(){
return this._buffer.slice(0, this._destinationPos);
};
function onFull(buffer, extraSize, callback){
var newBuffer = new Buffer(buffer.length + extraSize);
buffer.copy(newBuffer);
callback(newBuffer, newBuffer.slice(buffer.length));
}
exports.BufferReadStream = BufferReadStream;
exports.BufferWriteStream = BufferWriteStream;
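
A hedged sketch of the BufferWriteStream flow implemented above: with the default minBlockAllocSize of 0 the first write cannot be committed immediately, so it is queued, the buffer is grown through the onFull hook, and "drain" fires once the queue has been flushed. This mirrors the "Pending Commits" case in the test suite; require("streamers") is assumed to resolve to this vendored package.

    var BufferWriteStream = require("streamers").BufferWriteStream;
    var out = new BufferWriteStream();
    var flushed = out.write("Jibberish"); // false: the commit is deferred until the buffer grows
    out.on("drain", function(){
        console.log(out.getBuffer().toString()); // "Jibberish"
        console.log(out.bytesWritten);           // 9
    });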

154 node_modules/streamers/lib/proactive_read_stream.js generated vendored Normal file

@@ -0,0 +1,154 @@
var _ = require("underscore");
var bufferStreamModule = require("./buffer_stream");
var BufferWriteStream = bufferStreamModule.BufferWriteStream;
function ProactiveReadStream(source, writeStreamOptions){
var self = this;
function consumer(callback){
if(self._readCompletionCondition){
self._completeReadOperation();
}
callback(consumer);
}
function onEnd(){
self.cancel("end of stream");
}
this._writeStream = new BufferWriteStream(_.extend(writeStreamOptions || {},{
onWrite: consumer,
onEnd: onEnd
}));
source.pipe(this._writeStream);
}
ProactiveReadStream.prototype._completeReadOperation = function(){
if(!this._readCompletionCondition){
return;
}
var consume = 0;
var committedSlice = this._writeStream.getCommittedSlice();
consume = this._readCompletionCondition(committedSlice);
if(consume){
var consumedSlice = committedSlice.slice(0, consume);
this._writeStream.consume(consume);
this._readCompletionCondition = null;
if(this._readCompletionCallback){
var completionCallback = this._readCompletionCallback;
this._readCompletionCallback = null;
completionCallback(consumedSlice);
}
}
};
ProactiveReadStream.prototype.read = function(condition, callback){
if(condition === null){
condition = everything();
}
else if(condition.constructor === Number){
condition = bytes(condition);
}
else if(condition.constructor === String){
condition = until(condition);
}
if(this._readCompletionCondition){
throw new Error("read operation in progress");
}
this._readCompletionCondition = condition;
this._readCompletionCallback = callback;
var self = this;
process.nextTick(function(){
self._completeReadOperation();
});
};
ProactiveReadStream.prototype.cancel = function(message){
message = message || "operation cancelled";
this._readCompletionCondition = null;
if(this._readCompletionCallback){
var callback = this._readCompletionCallback;
this._readCompletionCallback = null;
callback(null, new Error(message));
}
};
function everything(){
return function(buffer){
return buffer.length;
};
}
function bytes(n){
return function(buffer){
if(buffer.length >= n){
return n;
}
else{
return 0;
}
};
}
function until(delimiter, encoding){
if(!Buffer.isBuffer(delimiter)){
delimiter = new Buffer(delimiter, encoding);
}
var delimiterLength = delimiter.length;
var bufferIndex = 0;
return function(buffer){
var end = buffer.length - delimiterLength + 1;
var match;
for(; bufferIndex < end; bufferIndex++){
match = true;
for(var i = 0; i < delimiterLength; i++){
if(delimiter[i] !== buffer[bufferIndex + i]){
match = false;
break;
}
}
if(match){
var consume = bufferIndex + delimiterLength;
bufferIndex = 0;
return consume;
}
}
return 0;
};
}
exports.ProactiveReadStream = ProactiveReadStream;
exports.bytes = bytes;
exports.until = until;
exports.everything = everything;
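
A hedged usage sketch of the proactor-style API defined above: read() accepts a byte count, a delimiter string, or null for "whatever has been buffered", and the callback receives either the consumed slice or (null, error) when the pending read is cancelled, for example at end of stream. It assumes require("streamers") resolves to this vendored package.

    var streamers = require("streamers");
    var source = new streamers.BufferReadStream(new Buffer("line 1\nline 2\n"));
    var reader = new streamers.ProactiveReadStream(source);
    reader.read("\n", function(line, error){
        if(error){ return console.error(error); }
        console.log(line.toString()); // "line 1\n"
        reader.read(7, function(rest, error){
            if(error){ return console.error(error); }
            console.log(rest.toString()); // "line 2\n"
        });
    });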

31 node_modules/streamers/package.json generated vendored Normal file

@@ -0,0 +1,31 @@
{
"name": "streamers",
"description": "Stream classes library",
"version": "0.1.1",
"maintainers": [
{
"name": "Graham Daws",
"email": "graham.daws@gmail.com"
}
],
"keywords": [
"stream",
"memorystream",
"bufferstream",
"proactor"
],
"dependencies": {
"underscore":"1.2.3",
"vows": "0.6.1"
},
"licenses":[
{
"type": "MIT",
"url": "http://www.opensource.org/licenses/MIT"
}
],
"repository":{
"type":"git",
"url": "https://gdaws@github.com/gdaws/node-streamers.git"
}
}

517 node_modules/streamers/test/buffer_stream_test.js generated vendored Normal file

@@ -0,0 +1,517 @@
var vows = require("vows");
var assert = require("assert");
var index = require("../index");
function createTestReadStream(options){
return new index.BufferReadStream(new Buffer("jibberish"), options);
}
function createTestWriteStream(options){
return new index.BufferWriteStream(options);
}
vows.describe("Buffer IO Streams").addBatch({
"BufferReadStream Test":{
"Single Chunk Data Emission":{
topic: function(){
var stream = createTestReadStream();
var callback = this.callback;
stream.on("data", function(data){
callback(null, data);
});
},
"should receive all": function(data){
assert.isTrue(Buffer.isBuffer(data));
assert.equal(data.toString(), "jibberish");
}
},
"Single Chunk End Emission":{
topic: function(){
var stream = createTestReadStream();
stream.on("end", this.callback);
},
"should be called": function(){}
},
"Multiple Chunk Data Emissions":{
topic: function(){
var stream = createTestReadStream({
chunkSize: 4
});
var callback = this.callback;
var chunks = 0;
stream.on("data", function(data){
[
function(){
if(data.toString() != "jibb"){
callback("chunk 1 incorrect");
}
},
function(){
if(data.toString() != "eris"){
callback("chunk 2 incorrect");
}
},
function(){
if(data.toString() != "h"){
callback("chunk 3 incorrect");
}
callback(null, new Buffer("jibberish"));
}
][chunks]();
chunks += 1;
});
},
"should receive all": function(data){
assert.isTrue(Buffer.isBuffer(data));
assert.equal(data.toString(), "jibberish");
}
},
"Multiple Chunk End Emission":{
topic: function(){
var stream = createTestReadStream({
chunkSize: 4
});
var callback = this.callback;
stream.on("end", function(){
callback();
});
},
"should be called": function(){}
},
"Chunk size greater than Buffer length":{
topic: function(){
var stream = createTestReadStream({
chunkSize: 100
});
var callback = this.callback;
stream.on("data", function(data){
callback(null, data);
});
},
"doesn't over-read": function(data){
assert.equal(data.length, 9);
assert.equal(data.toString(), "jibberish");
}
},
"Long read time": {
topic: function(){
var stream = createTestReadStream({
chunkSize: 4,
readDelay: 10
});
var callback = this.callback;
var dataBuffer = "";
stream.on("data", function(data){
dataBuffer += data.toString();
});
stream.on("end", function(){
if(dataBuffer != "jibberish"){
callback("chunks incorrect");
}
callback();
});
},
"should end": function(){}
},
"Paused": {
topic: function(){
var stream = createTestReadStream({
paused: true
});
var callback = this.callback;
stream.on("data", function(){
callback("not paused");
});
stream.on("end", function(){
callback("not paused");
});
setTimeout(function(){
callback();
}, 1);
},
"should timeout": function(){}
},
"Read, Pause and Resume":{
topic: function(){
var stream = createTestReadStream({
chunkSize: 1
});
var callback = this.callback;
var dataBuffer = "";
stream.on("data", function(data){
dataBuffer += data.toString();
stream.pause();
process.nextTick(function(){
stream.resume();
});
});
stream.on("end", function(){
if(dataBuffer != "jibberish"){
callback("chunks incorrect");
}
callback();
});
},
"should end": function(){}
},
"End": {
topic: function(){
var stream = createTestReadStream();
var callback = this.callback;
stream.on("end", function(){
callback(null, stream);
});
},
"should be unreadable": function(stream){
assert.isFalse(stream.readable);
}
},
"Destroy": {
topic: function(){
var stream = createTestReadStream();
var callback = this.callback;
stream.on("data", function(){
callback("should not have received data event");
});
stream.on("end", function(){
callback("should not have received end event");
});
stream.destroy();
setTimeout(function(){
callback(null, stream);
}, 1);
},
"should timeout": function(){},
"should be unreadable": function(stream){
assert.isFalse(stream.readable);
}
},
"Ascii Encoding":{
topic: function(){
var stream = createTestReadStream();
stream.setEncoding("ascii");
var callback = this.callback;
stream.on("data", function(data){callback(null, data);});
},
"should receive ascii string": function(data){
assert.equal(data, "jibberish");
}
}
},
"BufferWriteStream Test":{
"Instant Commits":{
topic: function(){
var stream = createTestWriteStream({
minBlockAllocSize:2
});
return stream.write("J") && stream.write("i");
},
"should return true": function(value){
assert.isTrue(value);
}
},
"Pending Commits":{
topic: function(){
var stream = createTestWriteStream();
var callback = this.callback;
if(stream.write("Jibberish")){
callback("expecting delayed commit");
}
stream.on("drain", function(){
callback(null, stream);
});
},
"should drain": function(){},
"count bytes written": function(stream){
assert.equal(stream.bytesWritten, 9);
},
"data written": function(stream){
assert.equal(stream.getBuffer().toString(), "Jibberish");
}
},
"No events after destroy":{
topic: function(){
var stream = createTestWriteStream();
var callback = this.callback;
stream.write("Jibberish");
stream.on("drain", function(){
callback("should not have received drain event");
});
stream.on("error", function(){
callback("should not have received error event");
});
stream.destroy();
setTimeout(function(){
callback(null, stream);
}, 1);
},
"should timeout": function(){},
"should not be writable": function(stream){
assert.isFalse(stream.writable);
},
"should have zero bytes written": function(stream){
assert.equal(stream.bytesWritten, 0);
}
},
"onFull":{
topic: function(){
var callback = this.callback;
var stream = createTestWriteStream({
onFull:function(){
process.nextTick(callback);
}
});
stream.write("test");
},
"should be called": function(){}
},
"onEnd": {
topic: function(){
var callback = this.callback;
var stream = createTestWriteStream({
onEnd: function(){
process.nextTick(function(){
callback(null, stream);
});
}
});
stream.end("Jibberish");
},
"should not be writable": function(stream){
assert.isFalse(stream.writable);
},
"has written data": function(stream){
assert.equal(stream.getBuffer().toString(), "Jibberish");
assert.equal(stream.bytesWritten, 9);
}
},
"grow buffer":{
topic: function(){
var callback = this.callback;
var stream = createTestWriteStream({
onFull:function(buffer, extraSize, callback){
var newBuffer = new Buffer(buffer.length + extraSize);
buffer.copy(newBuffer);
callback(newBuffer, newBuffer.slice(buffer.length));
}
});
stream.write("Jibberish");
stream.on("drain", function(){
callback(null, stream);
});
},
"should drain": function(){},
"data written": function(stream){
assert.equal(stream.getBuffer().toString(), "Jibberish");
}
},
"Flow control":{
topic:function(){
var callback = this.callback;
var stream = createTestWriteStream({
onEnd: function(){
process.nextTick(function(){
callback(null, stream);
});
}
});
stream.on("drain", dequeue);
var queue = ["First", "Second", "Third"];
function dequeue(){
if(!queue.length){
stream.end();
return;
}
if(stream.write(queue.shift())){
callback("should not have committed");
}
}
dequeue();
},
"data written": function(stream){
assert.equal(stream.bytesWritten, 16);
assert.equal(stream.getBuffer().toString(), "FirstSecondThird");
}
},
"No write queue":{
topic: function(){
var stream = createTestWriteStream({
minBlockAllocSize: 9
});
return [stream.write("jibberish"), stream];
},
"write should return true": function(value){
assert.isTrue(value[0]);
},
"data should be written to buffer": function(value){
assert.equal(value[1].getBuffer().toString(), "jibberish");
assert.equal(value[1].bytesWritten, 9);
},
"stream should be writeable":function(value){
assert.isTrue(value[1].writable);
}
},
"consume onWrite": {
topic: function(){
var callback = this.callback;
var stream = createTestWriteStream({
onWrite: function(continuation, thisCallback){
this.consume(this.getCommittedSlice().length);
continuation(thisCallback);
process.nextTick(function(){
callback(null, stream);
});
}
});
stream.write("jibberish");
},
"should have empty buffer": function(stream){
assert.equal(stream.getBuffer().length, 0);
},
"should be writable": function(stream){
assert.isTrue(stream.writable);
},
"should have bytes written": function(stream){
assert.equal(stream.bytesWritten, 9);
}
},
"chunked write": {
topic: function(){
var callback = this.callback;
var lastCommitLength = 0;
var stream = createTestWriteStream({
maxBlockAllocSize: 4,
onWrite: function(continuation, thisCallback){
var committedSlice = this.getCommittedSlice();
if(committedSlice.length - lastCommitLength > 4){
// report the failure through the outer vows callback, which the
// original code shadowed with its continuation parameter
return callback("chunk size too large");
}
lastCommitLength = committedSlice.length;
continuation(thisCallback);
}
});
if(!stream.write("jibberish")){
stream.on("drain", function(){
callback(null, stream);
});
}
else{
callback("was expecting write to return false");
}
},
"data written": function(stream){
assert.equal(stream.getCommittedSlice().toString(), "jibberish");
},
"bytes written": function(stream){
assert.equal(stream.bytesWritten, 9);
}
}
}
})["export"](module);


@@ -0,0 +1,177 @@
var vows = require("vows");
var assert = require("assert");
var index = require("../index");
vows.describe("Proactor Read Operations").addBatch({
"ProactiveReadStream Test":{
"Read Number of Bytes":{
topic: function(){
var callback = this.callback;
var source = new index.BufferReadStream(new Buffer("jibberish"), {chunkSize: 4});
var reader = new index.ProactiveReadStream(source);
function firstRead(){
reader.read(2, function(data, error){
if(data){
if(data.toString() == "ji"){
secondRead();
}
else{
callback("first read returned wrong data");
}
}
else{
callback(error);
}
});
}
function secondRead(){
reader.read(4, function(data, error){
if(data){
if(data.toString() == "bber"){
thirdRead();
}
else{
callback("second read returned wrong data");
}
}
else{
callback(error);
}
});
}
function thirdRead(){
reader.read(3, function(data, error){
if(data){
if(data.toString() == "ish"){
callback();
}
else{
callback("third read returned wrong data");
}
}
else{
callback(error);
}
});
}
firstRead();
},
"finish": function(){}
}
},
"Read Until":{
topic:function(){
var callback = this.callback;
var source = new index.BufferReadStream(new Buffer("line 1\nline 2\n"));
var reader = new index.ProactiveReadStream(source);
function readFirstLine(){
reader.read("\n", function(data, error){
if(data){
if(data.toString() == "line 1\n"){
readSecondLine();
}
else{
callback("first read returned wrong data");
}
}
else{
callback(error);
}
});
}
function readSecondLine(){
reader.read("\n", function(data, error){
if(data){
if(data.toString() == "line 2\n"){
callback();
}
else{
callback("second read returned wrong data");
}
}
else{
callback(error);
}
});
}
readFirstLine();
},
"finish": function(){}
},
"Read soon as possible":{
topic:function(){
var callback = this.callback;
var source = new index.BufferReadStream(new Buffer("jibberish"), {chunkSize:4});
var reader = new index.ProactiveReadStream(source);
function readFirstChunk(){
reader.read(null, function(data, error){
if(data){
if(data == "jibb"){
readSecondChunk();
}
else{
callback("first read returned wrong data");
}
}
else{
callback(error);
}
});
}
function readSecondChunk(){
reader.read(null, function(data, error){
if(data){
if(data == "eris"){
readThirdChunk();
}
else{
callback("second read returned wrong data");
}
}
else{
callback(error);
}
});
}
function readThirdChunk(){
reader.read(null, function(data, error){
if(data){
if(data == "h"){
callback();
}
else{
callback("third read returned wrong data");
}
}
else{
callback(error);
}
});
}
readFirstChunk();
},
"finish": function(){}
}
})["export"](module);