@@ -81,7 +81,8 @@ function _write(chunk, encoding, callback) {
   batch[grpc.opType.SEND_MESSAGE] = this.serialize(chunk);
   this.call.startBatch(batch, function(err, event) {
     if (err) {
-      throw err;
+      // Something has gone wrong. Stop writing by failing to call callback
+      return;
     }
     callback();
   });
@@ -120,7 +121,9 @@ function _read(size) {
    */
   function readCallback(err, event) {
     if (err) {
-      throw err;
+      // Something has gone wrong. Stop reading and wait for status
+      self.finished = true;
+      return;
     }
     if (self.finished) {
       self.push(null);
@@ -237,10 +240,6 @@ function makeUnaryRequestFunction(method, serialize, deserialize) {
       client_batch[grpc.opType.RECV_MESSAGE] = true;
       client_batch[grpc.opType.RECV_STATUS_ON_CLIENT] = true;
       call.startBatch(client_batch, function(err, response) {
-        if (err) {
-          callback(err);
-          return;
-        }
         emitter.emit('status', response.status);
         if (response.status.code !== grpc.status.OK) {
           var error = new Error(response.status.details);
@@ -248,6 +247,12 @@ function makeUnaryRequestFunction(method, serialize, deserialize) {
           error.metadata = response.status.metadata;
           callback(error);
           return;
+        } else {
+          if (err) {
+            // Got a batch error, but OK status. Something went wrong
+            callback(err);
+            return;
+          }
         }
         emitter.emit('metadata', response.metadata);
         callback(null, deserialize(response.read));
@@ -300,7 +305,8 @@ function makeClientStreamRequestFunction(method, serialize, deserialize) {
       metadata_batch[grpc.opType.RECV_INITIAL_METADATA] = true;
       call.startBatch(metadata_batch, function(err, response) {
         if (err) {
-          callback(err);
+          // The call has stopped for some reason. A non-OK status will arrive
+          // in the other batch.
           return;
         }
         stream.emit('metadata', response.metadata);
@@ -309,10 +315,6 @@ function makeClientStreamRequestFunction(method, serialize, deserialize) {
       client_batch[grpc.opType.RECV_MESSAGE] = true;
       client_batch[grpc.opType.RECV_STATUS_ON_CLIENT] = true;
       call.startBatch(client_batch, function(err, response) {
-        if (err) {
-          callback(err);
-          return;
-        }
         stream.emit('status', response.status);
         if (response.status.code !== grpc.status.OK) {
           var error = new Error(response.status.details);
@@ -320,6 +322,12 @@ function makeClientStreamRequestFunction(method, serialize, deserialize) {
           error.metadata = response.status.metadata;
           callback(error);
           return;
+        } else {
+          if (err) {
+            // Got a batch error, but OK status. Something went wrong
+            callback(err);
+            return;
+          }
         }
         callback(null, deserialize(response.read));
       });
@@ -373,16 +381,15 @@ function makeServerStreamRequestFunction(method, serialize, deserialize) {
       start_batch[grpc.opType.SEND_CLOSE_FROM_CLIENT] = true;
       call.startBatch(start_batch, function(err, response) {
         if (err) {
-          throw err;
+          // The call has stopped for some reason. A non-OK status will arrive
+          // in the other batch.
+          return;
         }
         stream.emit('metadata', response.metadata);
       });
       var status_batch = {};
       status_batch[grpc.opType.RECV_STATUS_ON_CLIENT] = true;
       call.startBatch(status_batch, function(err, response) {
-        if (err) {
-          throw err;
-        }
         stream.emit('status', response.status);
         if (response.status.code !== grpc.status.OK) {
           var error = new Error(response.status.details);
@@ -390,6 +397,12 @@ function makeServerStreamRequestFunction(method, serialize, deserialize) {
           error.metadata = response.status.metadata;
           stream.emit('error', error);
           return;
+        } else {
+          if (err) {
+            // Got a batch error, but OK status. Something went wrong
+            stream.emit('error', err);
+            return;
+          }
         }
       });
     });
@@ -438,16 +451,15 @@ function makeBidiStreamRequestFunction(method, serialize, deserialize) {
       start_batch[grpc.opType.RECV_INITIAL_METADATA] = true;
       call.startBatch(start_batch, function(err, response) {
         if (err) {
-          throw err;
+          // The call has stopped for some reason. A non-OK status will arrive
+          // in the other batch.
+          return;
         }
         stream.emit('metadata', response.metadata);
       });
       var status_batch = {};
       status_batch[grpc.opType.RECV_STATUS_ON_CLIENT] = true;
       call.startBatch(status_batch, function(err, response) {
-        if (err) {
-          throw err;
-        }
         stream.emit('status', response.status);
         if (response.status.code !== grpc.status.OK) {
           var error = new Error(response.status.details);
@@ -455,6 +467,12 @@ function makeBidiStreamRequestFunction(method, serialize, deserialize) {
           error.metadata = response.status.metadata;
           stream.emit('error', error);
           return;
+        } else {
+          if (err) {
+            // Got a batch error, but OK status. Something went wrong
+            stream.emit('error', err);
+            return;
+          }
         }
       });
     });