Merge pull request #82 from tbetbetbe/grpc_ruby_add_rubocop_fix_lint_style_issues

Grpc ruby add rubocop fix lint style issues
pull/89/head
Michael Lumish 10 years ago
commit 861c79b08a
  1. 10
      src/ruby/.rubocop.yml
  2. 52
      src/ruby/.rubocop_todo.yml
  3. 32
      src/ruby/Rakefile
  4. 57
      src/ruby/bin/interop/interop_client.rb
  5. 35
      src/ruby/bin/interop/interop_server.rb
  6. 25
      src/ruby/bin/math_client.rb
  7. 44
      src/ruby/bin/math_server.rb
  8. 12
      src/ruby/bin/noproto_client.rb
  9. 16
      src/ruby/bin/noproto_server.rb
  10. 26
      src/ruby/ext/grpc/extconf.rb
  11. 29
      src/ruby/grpc.gemspec
  12. 19
      src/ruby/lib/grpc/beefcake.rb
  13. 7
      src/ruby/lib/grpc/core/event.rb
  14. 18
      src/ruby/lib/grpc/core/time_consts.rb
  15. 9
      src/ruby/lib/grpc/errors.rb
  16. 919
      src/ruby/lib/grpc/generic/active_call.rb
  17. 337
      src/ruby/lib/grpc/generic/bidi_call.rb
  18. 706
      src/ruby/lib/grpc/generic/client_stub.rb
  19. 131
      src/ruby/lib/grpc/generic/rpc_desc.rb
  20. 648
      src/ruby/lib/grpc/generic/rpc_server.rb
  21. 326
      src/ruby/lib/grpc/generic/service.rb
  22. 1
      src/ruby/lib/grpc/version.rb
  23. 2
      src/ruby/spec/alloc_spec.rb
  24. 4
      src/ruby/spec/byte_buffer_spec.rb
  25. 60
      src/ruby/spec/call_spec.rb
  26. 37
      src/ruby/spec/channel_spec.rb
  27. 82
      src/ruby/spec/client_server_spec.rb
  28. 5
      src/ruby/spec/completion_queue_spec.rb
  29. 14
      src/ruby/spec/credentials_spec.rb
  30. 22
      src/ruby/spec/event_spec.rb
  31. 48
      src/ruby/spec/generic/active_call_spec.rb
  32. 143
      src/ruby/spec/generic/client_stub_spec.rb
  33. 117
      src/ruby/spec/generic/rpc_desc_spec.rb
  34. 35
      src/ruby/spec/generic/rpc_server_pool_spec.rb
  35. 139
      src/ruby/spec/generic/rpc_server_spec.rb
  36. 58
      src/ruby/spec/generic/service_spec.rb
  37. 2
      src/ruby/spec/metadata_spec.rb
  38. 4
      src/ruby/spec/port_picker.rb
  39. 13
      src/ruby/spec/server_credentials_spec.rb
  40. 50
      src/ruby/spec/server_spec.rb
  41. 4
      src/ruby/spec/time_consts_spec.rb

@ -0,0 +1,10 @@
# This is the configuration used to check the rubocop source code.
inherit_from: .rubocop_todo.yml
AllCops:
Exclude:
- 'bin/apis/**/*'
- 'bin/interop/test/**/*'
- 'bin/math.rb'
- 'bin/math_services.rb'

@ -0,0 +1,52 @@
# This configuration was generated by `rubocop --auto-gen-config`
# on 2015-01-16 02:30:04 -0800 using RuboCop version 0.28.0.
# The point is for the user to remove these configuration records
# one by one as the offenses are removed from the code base.
# Note that changes in the inspected code, or installation of new
# versions of RuboCop, may require this file to be generated again.
# Offense count: 3
# Lint/UselessAssignment:
# Enabled: false
# Offense count: 33
Metrics/AbcSize:
Max: 39
# Offense count: 3
# Configuration parameters: CountComments.
Metrics/ClassLength:
Max: 231
# Offense count: 2
Metrics/CyclomaticComplexity:
Max: 8
# Offense count: 36
# Configuration parameters: CountComments.
Metrics/MethodLength:
Max: 37
# Offense count: 8
# Configuration parameters: CountKeywordArgs.
Metrics/ParameterLists:
Max: 8
# Offense count: 2
Metrics/PerceivedComplexity:
Max: 10
# Offense count: 7
# Configuration parameters: AllowedVariables.
Style/GlobalVars:
Enabled: false
# Offense count: 1
# Configuration parameters: EnforcedStyle, MinBodyLength, SupportedStyles.
Style/Next:
Enabled: false
# Offense count: 2
# Configuration parameters: Methods.
Style/SingleLineBlockParams:
Enabled: false

@ -1,46 +1,44 @@
# -*- ruby -*- # -*- ruby -*-
require 'rake/extensiontask' require 'rake/extensiontask'
require 'rspec/core/rake_task' require 'rspec/core/rake_task'
require 'rubocop/rake_task'
desc 'Run Rubocop to check for style violations'
RuboCop::RakeTask.new
Rake::ExtensionTask.new 'grpc' do |ext| Rake::ExtensionTask.new 'grpc' do |ext|
ext.lib_dir = File.join('lib', 'grpc') ext.lib_dir = File.join('lib', 'grpc')
end end
SPEC_SUITES = [ SPEC_SUITES = [
{ :id => :wrapper, :title => 'wrapper layer', :files => %w(spec/*.rb) }, { id: :wrapper, title: 'wrapper layer', files: %w(spec/*.rb) },
{ :id => :idiomatic, :title => 'idiomatic layer', :dir => %w(spec/generic), { id: :idiomatic, title: 'idiomatic layer', dir: %w(spec/generic),
:tag => '~bidi' }, tag: '~bidi' },
{ :id => :bidi, :title => 'bidi tests', :dir => %w(spec/generic), { id: :bidi, title: 'bidi tests', dir: %w(spec/generic),
:tag => 'bidi' } tag: 'bidi' }
] ]
desc "Run all RSpec tests" desc 'Run all RSpec tests'
namespace :spec do namespace :spec do
namespace :suite do namespace :suite do
SPEC_SUITES.each do |suite| SPEC_SUITES.each do |suite|
desc "Run all specs in #{suite[:title]} spec suite" desc "Run all specs in #{suite[:title]} spec suite"
RSpec::Core::RakeTask.new(suite[:id]) do |t| RSpec::Core::RakeTask.new(suite[:id]) do |t|
spec_files = [] spec_files = []
if suite[:files] suite[:files].each { |f| spec_files += Dir[f] } if suite[:files]
suite[:files].each { |f| spec_files += Dir[f] }
end
if suite[:dirs] if suite[:dirs]
suite[:dirs].each { |f| spec_files += Dir["#{f}/**/*_spec.rb"] } suite[:dirs].each { |f| spec_files += Dir["#{f}/**/*_spec.rb"] }
end end
t.pattern = spec_files t.pattern = spec_files
t.rspec_opts = "--tag #{suite[:tag]}" if suite[:tag]
if suite[:tag]
t.rspec_opts = "--tag #{suite[:tag]}"
end
end end
end end
end end
end end
task :default => "spec:suite:idiomatic" # this should be spec:suite:bidi task default: 'spec:suite:idiomatic' # this should be spec:suite:bidi
task "spec:suite:wrapper" => :compile task 'spec:suite:wrapper' => :compile
task "spec:suite:idiomatic" => "spec:suite:wrapper" task 'spec:suite:idiomatic' => 'spec:suite:wrapper'
task "spec:suite:bidi" => "spec:suite:idiomatic" task 'spec:suite:bidi' => 'spec:suite:idiomatic'

@ -65,7 +65,7 @@ end
# creates a Credentials from the test certificates. # creates a Credentials from the test certificates.
def test_creds def test_creds
certs = load_test_certs certs = load_test_certs
creds = GRPC::Core::Credentials.new(certs[0]) GRPC::Core::Credentials.new(certs[0])
end end
# creates a test stub that accesses host:port securely. # creates a test stub that accesses host:port securely.
@ -73,15 +73,15 @@ def create_stub(host, port)
address = "#{host}:#{port}" address = "#{host}:#{port}"
stub_opts = { stub_opts = {
:creds => test_creds, :creds => test_creds,
GRPC::Core::Channel::SSL_TARGET => 'foo.test.google.com', GRPC::Core::Channel::SSL_TARGET => 'foo.test.google.com'
} }
logger.info("... connecting securely to #{address}") logger.info("... connecting securely to #{address}")
stub = Grpc::Testing::TestService::Stub.new(address, **stub_opts) Grpc::Testing::TestService::Stub.new(address, **stub_opts)
end end
# produces a string of null chars (\0) of length l. # produces a string of null chars (\0) of length l.
def nulls(l) def nulls(l)
raise 'requires #{l} to be +ve' if l < 0 fail 'requires #{l} to be +ve' if l < 0
[].pack('x' * l).force_encoding('utf-8') [].pack('x' * l).force_encoding('utf-8')
end end
@ -102,13 +102,13 @@ class PingPongPlayer
def each_item def each_item
return enum_for(:each_item) unless block_given? return enum_for(:each_item) unless block_given?
req_cls, p_cls= StreamingOutputCallRequest, ResponseParameters # short req_cls, p_cls = StreamingOutputCallRequest, ResponseParameters # short
count = 0 count = 0
@msg_sizes.each do |m| @msg_sizes.each do |m|
req_size, resp_size = m req_size, resp_size = m
req = req_cls.new(:payload => Payload.new(:body => nulls(req_size)), req = req_cls.new(payload: Payload.new(body: nulls(req_size)),
:response_type => COMPRESSABLE, response_type: COMPRESSABLE,
:response_parameters => [p_cls.new(:size => resp_size)]) response_parameters: [p_cls.new(size: resp_size)])
yield req yield req
resp = @queue.pop resp = @queue.pop
assert_equal(PayloadType.lookup(COMPRESSABLE), resp.payload.type, assert_equal(PayloadType.lookup(COMPRESSABLE), resp.payload.type,
@ -148,11 +148,11 @@ class NamedTests
# ruby server # ruby server
# FAILED # FAILED
def large_unary def large_unary
req_size, wanted_response_size = 271828, 314159 req_size, wanted_response_size = 271_828, 314_159
payload = Payload.new(:type => COMPRESSABLE, :body => nulls(req_size)) payload = Payload.new(type: COMPRESSABLE, body: nulls(req_size))
req = SimpleRequest.new(:response_type => COMPRESSABLE, req = SimpleRequest.new(response_type: COMPRESSABLE,
:response_size => wanted_response_size, response_size: wanted_response_size,
:payload => payload) payload: payload)
resp = @stub.unary_call(req) resp = @stub.unary_call(req)
assert_equal(wanted_response_size, resp.payload.body.length, assert_equal(wanted_response_size, resp.payload.body.length,
'large_unary: payload had the wrong length') 'large_unary: payload had the wrong length')
@ -166,27 +166,27 @@ class NamedTests
# ruby server # ruby server
# FAILED # FAILED
def client_streaming def client_streaming
msg_sizes = [27182, 8, 1828, 45904] msg_sizes = [27_182, 8, 1828, 45_904]
wanted_aggregate_size = 74922 wanted_aggregate_size = 74_922
reqs = msg_sizes.map do |x| reqs = msg_sizes.map do |x|
req = Payload.new(:body => nulls(x)) req = Payload.new(body: nulls(x))
StreamingInputCallRequest.new(:payload => req) StreamingInputCallRequest.new(payload: req)
end end
resp = @stub.streaming_input_call(reqs) resp = @stub.streaming_input_call(reqs)
assert_equal(wanted_aggregate_size, resp.aggregated_payload_size, assert_equal(wanted_aggregate_size, resp.aggregated_payload_size,
'client_streaming: aggregate payload size is incorrect') 'client_streaming: aggregate payload size is incorrect')
p 'OK: client_streaming' p 'OK: client_streaming'
end end
# TESTING: # TESTING:
# PASSED # PASSED
# ruby server # ruby server
# FAILED # FAILED
def server_streaming def server_streaming
msg_sizes = [31415, 9, 2653, 58979] msg_sizes = [31_415, 9, 2653, 58_979]
response_spec = msg_sizes.map { |s| ResponseParameters.new(:size => s) } response_spec = msg_sizes.map { |s| ResponseParameters.new(size: s) }
req = StreamingOutputCallRequest.new(:response_type => COMPRESSABLE, req = StreamingOutputCallRequest.new(response_type: COMPRESSABLE,
:response_parameters => response_spec) response_parameters: response_spec)
resps = @stub.streaming_output_call(req) resps = @stub.streaming_output_call(req)
resps.each_with_index do |r, i| resps.each_with_index do |r, i|
assert i < msg_sizes.length, 'too many responses' assert i < msg_sizes.length, 'too many responses'
@ -203,13 +203,12 @@ class NamedTests
# ruby server # ruby server
# FAILED # FAILED
def ping_pong def ping_pong
msg_sizes = [[27182, 31415], [8, 9], [1828, 2653], [45904, 58979]] msg_sizes = [[27_182, 31_415], [8, 9], [1828, 2653], [45_904, 58_979]]
ppp = PingPongPlayer.new(msg_sizes) ppp = PingPongPlayer.new(msg_sizes)
resps = @stub.full_duplex_call(ppp.each_item) resps = @stub.full_duplex_call(ppp.each_item)
resps.each { |r| ppp.queue.push(r) } resps.each { |r| ppp.queue.push(r) }
p 'OK: ping_pong' p 'OK: ping_pong'
end end
end end
# validates the command line options, returning them as a Hash. # validates the command line options, returning them as a Hash.
@ -217,7 +216,7 @@ def parse_options
options = { options = {
'server_host' => nil, 'server_host' => nil,
'server_port' => nil, 'server_port' => nil,
'test_case' => nil, 'test_case' => nil
} }
OptionParser.new do |opts| OptionParser.new do |opts|
opts.banner = 'Usage: --server_host <server_host> --server_port server_port' opts.banner = 'Usage: --server_host <server_host> --server_port server_port'
@ -228,17 +227,17 @@ def parse_options
options['server_port'] = v options['server_port'] = v
end end
# instance_methods(false) gives only the methods defined in that class # instance_methods(false) gives only the methods defined in that class
test_cases = NamedTests.instance_methods(false).map { |t| t.to_s } test_cases = NamedTests.instance_methods(false).map(&:to_s)
test_case_list = test_cases.join(',') test_case_list = test_cases.join(',')
opts.on("--test_case CODE", test_cases, {}, "select a test_case", opts.on('--test_case CODE', test_cases, {}, 'select a test_case',
" (#{test_case_list})") do |v| " (#{test_case_list})") do |v|
options['test_case'] = v options['test_case'] = v
end end
end.parse! end.parse!
['server_host', 'server_port', 'test_case'].each do |arg| %w(server_host, server_port, test_case).each do |arg|
if options[arg].nil? if options[arg].nil?
raise OptionParser::MissingArgument.new("please specify --#{arg}") fail(OptionParser::MissingArgument, "please specify --#{arg}")
end end
end end
options options

@ -62,12 +62,12 @@ end
# creates a ServerCredentials from the test certificates. # creates a ServerCredentials from the test certificates.
def test_server_creds def test_server_creds
certs = load_test_certs certs = load_test_certs
server_creds = GRPC::Core::ServerCredentials.new(nil, certs[1], certs[2]) GRPC::Core::ServerCredentials.new(nil, certs[1], certs[2])
end end
# produces a string of null chars (\0) of length l. # produces a string of null chars (\0) of length l.
def nulls(l) def nulls(l)
raise 'requires #{l} to be +ve' if l < 0 fail 'requires #{l} to be +ve' if l < 0
[].pack('x' * l).force_encoding('utf-8') [].pack('x' * l).force_encoding('utf-8')
end end
@ -86,7 +86,7 @@ class EnumeratorQueue
loop do loop do
r = @q.pop r = @q.pop
break if r.equal?(@sentinel) break if r.equal?(@sentinel)
raise r if r.is_a?Exception fail r if r.is_a? Exception
yield r yield r
end end
end end
@ -98,27 +98,27 @@ class TestTarget < Grpc::Testing::TestService::Service
include Grpc::Testing include Grpc::Testing
include Grpc::Testing::PayloadType include Grpc::Testing::PayloadType
def empty_call(empty, call) def empty_call(_empty, _call)
Empty.new Empty.new
end end
def unary_call(simple_req, call) def unary_call(simple_req, _call)
req_size = simple_req.response_size req_size = simple_req.response_size
SimpleResponse.new(:payload => Payload.new(:type => COMPRESSABLE, SimpleResponse.new(payload: Payload.new(type: COMPRESSABLE,
:body => nulls(req_size))) body: nulls(req_size)))
end end
def streaming_input_call(call) def streaming_input_call(call)
sizes = call.each_remote_read.map { |x| x.payload.body.length } sizes = call.each_remote_read.map { |x| x.payload.body.length }
sum = sizes.inject { |sum,x| sum + x } sum = sizes.inject { |s, x| s + x }
StreamingInputCallResponse.new(:aggregated_payload_size => sum) StreamingInputCallResponse.new(aggregated_payload_size: sum)
end end
def streaming_output_call(req, call) def streaming_output_call(req, _call)
cls = StreamingOutputCallResponse cls = StreamingOutputCallResponse
req.response_parameters.map do |p| req.response_parameters.map do |p|
cls.new(:payload => Payload.new(:type => req.response_type, cls.new(payload: Payload.new(type: req.response_type,
:body => nulls(p.size))) body: nulls(p.size)))
end end
end end
@ -126,13 +126,13 @@ class TestTarget < Grpc::Testing::TestService::Service
# reqs is a lazy Enumerator of the requests sent by the client. # reqs is a lazy Enumerator of the requests sent by the client.
q = EnumeratorQueue.new(self) q = EnumeratorQueue.new(self)
cls = StreamingOutputCallResponse cls = StreamingOutputCallResponse
t = Thread.new do Thread.new do
begin begin
reqs.each do |req| reqs.each do |req|
logger.info("read #{req.inspect}") logger.info("read #{req.inspect}")
resp_size = req.response_parameters[0].size resp_size = req.response_parameters[0].size
resp = cls.new(:payload => Payload.new(:type => req.response_type, resp = cls.new(payload: Payload.new(type: req.response_type,
:body => nulls(resp_size))) body: nulls(resp_size)))
q.push(resp) q.push(resp)
end end
logger.info('finished reads') logger.info('finished reads')
@ -149,13 +149,12 @@ class TestTarget < Grpc::Testing::TestService::Service
# currently used in any tests # currently used in any tests
full_duplex_call(reqs) full_duplex_call(reqs)
end end
end end
# validates the command line options, returning them as a Hash. # validates the command line options, returning them as a Hash.
def parse_options def parse_options
options = { options = {
'port' => nil, 'port' => nil
} }
OptionParser.new do |opts| OptionParser.new do |opts|
opts.banner = 'Usage: --port port' opts.banner = 'Usage: --port port'
@ -165,7 +164,7 @@ def parse_options
end.parse! end.parse!
if options['port'].nil? if options['port'].nil?
raise OptionParser::MissingArgument.new("please specify --port") fail(OptionParser::MissingArgument, 'please specify --port')
end end
options options
end end

@ -29,7 +29,6 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Sample app that accesses a Calc service running on a Ruby gRPC server and # Sample app that accesses a Calc service running on a Ruby gRPC server and
# helps validate RpcServer as a gRPC server using proto2 serialization. # helps validate RpcServer as a gRPC server using proto2 serialization.
# #
@ -49,9 +48,9 @@ include GRPC::Core::TimeConsts
def do_div(stub) def do_div(stub)
logger.info('request_response') logger.info('request_response')
logger.info('----------------') logger.info('----------------')
req = Math::DivArgs.new(:dividend => 7, :divisor => 3) req = Math::DivArgs.new(dividend: 7, divisor: 3)
logger.info("div(7/3): req=#{req.inspect}") logger.info("div(7/3): req=#{req.inspect}")
resp = stub.div(req, deadline=INFINITE_FUTURE) resp = stub.div(req, INFINITE_FUTURE)
logger.info("Answer: #{resp.inspect}") logger.info("Answer: #{resp.inspect}")
logger.info('----------------') logger.info('----------------')
end end
@ -60,7 +59,7 @@ def do_sum(stub)
# to make client streaming requests, pass an enumerable of the inputs # to make client streaming requests, pass an enumerable of the inputs
logger.info('client_streamer') logger.info('client_streamer')
logger.info('---------------') logger.info('---------------')
reqs = [1, 2, 3, 4, 5].map { |x| Math::Num.new(:num => x) } reqs = [1, 2, 3, 4, 5].map { |x| Math::Num.new(num: x) }
logger.info("sum(1, 2, 3, 4, 5): reqs=#{reqs.inspect}") logger.info("sum(1, 2, 3, 4, 5): reqs=#{reqs.inspect}")
resp = stub.sum(reqs) # reqs.is_a?(Enumerable) resp = stub.sum(reqs) # reqs.is_a?(Enumerable)
logger.info("Answer: #{resp.inspect}") logger.info("Answer: #{resp.inspect}")
@ -70,9 +69,9 @@ end
def do_fib(stub) def do_fib(stub)
logger.info('server_streamer') logger.info('server_streamer')
logger.info('----------------') logger.info('----------------')
req = Math::FibArgs.new(:limit => 11) req = Math::FibArgs.new(limit: 11)
logger.info("fib(11): req=#{req.inspect}") logger.info("fib(11): req=#{req.inspect}")
resp = stub.fib(req, deadline=INFINITE_FUTURE) resp = stub.fib(req, INFINITE_FUTURE)
resp.each do |r| resp.each do |r|
logger.info("Answer: #{r.inspect}") logger.info("Answer: #{r.inspect}")
end end
@ -83,11 +82,11 @@ def do_div_many(stub)
logger.info('bidi_streamer') logger.info('bidi_streamer')
logger.info('-------------') logger.info('-------------')
reqs = [] reqs = []
reqs << Math::DivArgs.new(:dividend => 7, :divisor => 3) reqs << Math::DivArgs.new(dividend: 7, divisor: 3)
reqs << Math::DivArgs.new(:dividend => 5, :divisor => 2) reqs << Math::DivArgs.new(dividend: 5, divisor: 2)
reqs << Math::DivArgs.new(:dividend => 7, :divisor => 2) reqs << Math::DivArgs.new(dividend: 7, divisor: 2)
logger.info("div(7/3), div(5/2), div(7/2): reqs=#{reqs.inspect}") logger.info("div(7/3), div(5/2), div(7/2): reqs=#{reqs.inspect}")
resp = stub.div_many(reqs, deadline=10) resp = stub.div_many(reqs, 10)
resp.each do |r| resp.each do |r|
logger.info("Answer: #{r.inspect}") logger.info("Answer: #{r.inspect}")
end end
@ -103,7 +102,7 @@ end
def test_creds def test_creds
certs = load_test_certs certs = load_test_certs
creds = GRPC::Core::Credentials.new(certs[0]) GRPC::Core::Credentials.new(certs[0])
end end
def main def main
@ -117,7 +116,7 @@ def main
options['host'] = v options['host'] = v
end end
opts.on('-s', '--secure', 'access using test creds') do |v| opts.on('-s', '--secure', 'access using test creds') do |v|
options['secure'] = true options['secure'] = v
end end
end.parse! end.parse!
@ -128,7 +127,7 @@ def main
if options['secure'] if options['secure']
stub_opts = { stub_opts = {
:creds => test_creds, :creds => test_creds,
GRPC::Core::Channel::SSL_TARGET => 'foo.test.google.com', GRPC::Core::Channel::SSL_TARGET => 'foo.test.google.com'
} }
p stub_opts p stub_opts
p options['host'] p options['host']

@ -46,9 +46,8 @@ require 'optparse'
# Holds state for a fibonacci series # Holds state for a fibonacci series
class Fibber class Fibber
def initialize(limit) def initialize(limit)
raise "bad limit: got #{limit}, want limit > 0" if limit < 1 fail "bad limit: got #{limit}, want limit > 0" if limit < 1
@limit = limit @limit = limit
end end
@ -57,14 +56,14 @@ class Fibber
idx, current, previous = 0, 1, 1 idx, current, previous = 0, 1, 1
until idx == @limit until idx == @limit
if idx == 0 || idx == 1 if idx == 0 || idx == 1
yield Math::Num.new(:num => 1) yield Math::Num.new(num: 1)
idx += 1 idx += 1
next next
end end
tmp = current tmp = current
current = previous + current current = previous + current
previous = tmp previous = tmp
yield Math::Num.new(:num => current) yield Math::Num.new(num: current)
idx += 1 idx += 1
end end
end end
@ -85,43 +84,41 @@ class EnumeratorQueue
loop do loop do
r = @q.pop r = @q.pop
break if r.equal?(@sentinel) break if r.equal?(@sentinel)
raise r if r.is_a?Exception fail r if r.is_a? Exception
yield r yield r
end end
end end
end end
# The Math::Math:: module occurs because the service has the same name as its # The Math::Math:: module occurs because the service has the same name as its
# package. That practice should be avoided by defining real services. # package. That practice should be avoided by defining real services.
class Calculator < Math::Math::Service class Calculator < Math::Math::Service
def div(div_args, _call)
def div(div_args, call)
if div_args.divisor == 0 if div_args.divisor == 0
# To send non-OK status handlers raise a StatusError with the code and # To send non-OK status handlers raise a StatusError with the code and
# and detail they want sent as a Status. # and detail they want sent as a Status.
raise GRPC::StatusError.new(GRPC::Status::INVALID_ARGUMENT, fail GRPC::StatusError.new(GRPC::Status::INVALID_ARGUMENT,
'divisor cannot be 0') 'divisor cannot be 0')
end end
Math::DivReply.new(:quotient => div_args.dividend/div_args.divisor, Math::DivReply.new(quotient: div_args.dividend / div_args.divisor,
:remainder => div_args.dividend % div_args.divisor) remainder: div_args.dividend % div_args.divisor)
end end
def sum(call) def sum(call)
# the requests are accessible as the Enumerator call#each_request # the requests are accessible as the Enumerator call#each_request
nums = call.each_remote_read.collect { |x| x.num } nums = call.each_remote_read.collect(&:num)
sum = nums.inject { |sum,x| sum + x } sum = nums.inject { |s, x| s + x }
Math::Num.new(:num => sum) Math::Num.new(num: sum)
end end
def fib(fib_args, call) def fib(fib_args, _call)
if fib_args.limit < 1 if fib_args.limit < 1
raise StatusError.new(Status::INVALID_ARGUMENT, 'limit must be >= 0') fail StatusError.new(Status::INVALID_ARGUMENT, 'limit must be >= 0')
end end
# return an Enumerator of Nums # return an Enumerator of Nums
Fibber.new(fib_args.limit).generator() Fibber.new(fib_args.limit).generator
# just return the generator, GRPC::GenericServer sends each actual response # just return the generator, GRPC::GenericServer sends each actual response
end end
@ -132,10 +129,10 @@ class Calculator < Math::Math::Service
begin begin
requests.each do |req| requests.each do |req|
logger.info("read #{req.inspect}") logger.info("read #{req.inspect}")
resp = Math::DivReply.new(:quotient => req.dividend/req.divisor, resp = Math::DivReply.new(quotient: req.dividend / req.divisor,
:remainder => req.dividend % req.divisor) remainder: req.dividend % req.divisor)
q.push(resp) q.push(resp)
Thread::pass # let the internal Bidi threads run Thread.pass # let the internal Bidi threads run
end end
logger.info('finished reads') logger.info('finished reads')
q.push(self) q.push(self)
@ -147,7 +144,6 @@ class Calculator < Math::Math::Service
t.priority = -2 # hint that the div_many thread should not be favoured t.priority = -2 # hint that the div_many thread should not be favoured
q.each_item q.each_item
end end
end end
def load_test_certs def load_test_certs
@ -159,7 +155,7 @@ end
def test_server_creds def test_server_creds
certs = load_test_certs certs = load_test_certs
server_creds = GRPC::Core::ServerCredentials.new(nil, certs[1], certs[2]) GRPC::Core::ServerCredentials.new(nil, certs[1], certs[2])
end end
def main def main
@ -173,7 +169,7 @@ def main
options['host'] = v options['host'] = v
end end
opts.on('-s', '--secure', 'access using test creds') do |v| opts.on('-s', '--secure', 'access using test creds') do |v|
options['secure'] = true options['secure'] = v
end end
end.parse! end.parse!

@ -40,16 +40,18 @@ $LOAD_PATH.unshift(lib_dir) unless $LOAD_PATH.include?(lib_dir)
require 'grpc' require 'grpc'
require 'optparse' require 'optparse'
# a simple non-protobuf message class.
class NoProtoMsg class NoProtoMsg
def self.marshal(o) def self.marshal(_o)
'' ''
end end
def self.unmarshal(o) def self.unmarshal(_o)
NoProtoMsg.new NoProtoMsg.new
end end
end end
# service the uses the non-protobuf message class.
class NoProtoService class NoProtoService
include GRPC::GenericService include GRPC::GenericService
rpc :AnRPC, NoProtoMsg, NoProtoMsg rpc :AnRPC, NoProtoMsg, NoProtoMsg
@ -66,7 +68,7 @@ end
def test_creds def test_creds
certs = load_test_certs certs = load_test_certs
creds = GRPC::Core::Credentials.new(certs[0]) GRPC::Core::Credentials.new(certs[0])
end end
def main def main
@ -80,14 +82,14 @@ def main
options['host'] = v options['host'] = v
end end
opts.on('-s', '--secure', 'access using test creds') do |v| opts.on('-s', '--secure', 'access using test creds') do |v|
options['secure'] = true options['secure'] = v
end end
end.parse! end.parse!
if options['secure'] if options['secure']
stub_opts = { stub_opts = {
:creds => test_creds, :creds => test_creds,
GRPC::Core::Channel::SSL_TARGET => 'foo.test.google.com', GRPC::Core::Channel::SSL_TARGET => 'foo.test.google.com'
} }
p stub_opts p stub_opts
p options['host'] p options['host']

@ -40,26 +40,29 @@ $LOAD_PATH.unshift(lib_dir) unless $LOAD_PATH.include?(lib_dir)
require 'grpc' require 'grpc'
require 'optparse' require 'optparse'
# a simple non-protobuf message class.
class NoProtoMsg class NoProtoMsg
def self.marshal(o) def self.marshal(_o)
'' ''
end end
def self.unmarshal(o) def self.unmarshal(_o)
NoProtoMsg.new NoProtoMsg.new
end end
end end
# service the uses the non-protobuf message class.
class NoProtoService class NoProtoService
include GRPC::GenericService include GRPC::GenericService
rpc :AnRPC, NoProtoMsg, NoProtoMsg rpc :AnRPC, NoProtoMsg, NoProtoMsg
end end
# an implementation of the non-protobuf service.
class NoProto < NoProtoService class NoProto < NoProtoService
def initialize(default_var='ignored') def initialize(_default_var = 'ignored')
end end
def an_rpc(req, call) def an_rpc(req, _call)
logger.info('echo service received a request') logger.info('echo service received a request')
req req
end end
@ -74,7 +77,7 @@ end
def test_server_creds def test_server_creds
certs = load_test_certs certs = load_test_certs
server_creds = GRPC::Core::ServerCredentials.new(nil, certs[1], certs[2]) GRPC::Core::ServerCredentials.new(nil, certs[1], certs[2])
end end
def main def main
@ -88,7 +91,7 @@ def main
options['host'] = v options['host'] = v
end end
opts.on('-s', '--secure', 'access using test creds') do |v| opts.on('-s', '--secure', 'access using test creds') do |v|
options['secure'] = true options['secure'] = v
end end
end.parse! end.parse!
@ -106,5 +109,4 @@ def main
s.run s.run
end end
main main

@ -33,29 +33,29 @@ LIBDIR = RbConfig::CONFIG['libdir']
INCLUDEDIR = RbConfig::CONFIG['includedir'] INCLUDEDIR = RbConfig::CONFIG['includedir']
HEADER_DIRS = [ HEADER_DIRS = [
# Search /opt/local (Mac source install) # Search /opt/local (Mac source install)
'/opt/local/include', '/opt/local/include',
# Search /usr/local (Source install) # Search /usr/local (Source install)
'/usr/local/include', '/usr/local/include',
# Check the ruby install locations # Check the ruby install locations
INCLUDEDIR, INCLUDEDIR
] ]
LIB_DIRS = [ LIB_DIRS = [
# Search /opt/local (Mac source install) # Search /opt/local (Mac source install)
'/opt/local/lib', '/opt/local/lib',
# Search /usr/local (Source install) # Search /usr/local (Source install)
'/usr/local/lib', '/usr/local/lib',
# Check the ruby install locations # Check the ruby install locations
LIBDIR, LIBDIR
] ]
def crash(msg) def crash(msg)
print(" extconf failure: %s\n" % msg) print(" extconf failure: #{msg}\n")
exit 1 exit 1
end end

@ -1,31 +1,34 @@
# encoding: utf-8 # encoding: utf-8
$:.push File.expand_path("../lib", __FILE__) $LOAD_PATH.push File.expand_path('../lib', __FILE__)
require 'grpc/version' require 'grpc/version'
Gem::Specification.new do |s| Gem::Specification.new do |s|
s.name = "grpc" s.name = 'grpc'
s.version = Google::RPC::VERSION s.version = Google::RPC::VERSION
s.authors = ["One Platform Team"] s.authors = ['One Platform Team']
s.email = "stubby-team@google.com" s.email = 'stubby-team@google.com'
s.homepage = "http://go/grpc" s.homepage = 'http://go/grpc'
s.summary = 'Google RPC system in Ruby' s.summary = 'Google RPC system in Ruby'
s.description = 'Send RPCs from Ruby' s.description = 'Send RPCs from Ruby'
s.files = `git ls-files`.split("\n") s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- spec/*`.split("\n") s.test_files = `git ls-files -- spec/*`.split("\n")
s.executables = `git ls-files -- examples/*.rb`.split("\n").map{ |f| File.basename(f) } s.executables = `git ls-files -- bin/*.rb`.split("\n").map do |f|
s.require_paths = ['lib' ] File.basename(f)
end
s.require_paths = ['lib']
s.platform = Gem::Platform::RUBY s.platform = Gem::Platform::RUBY
s.add_dependency 'xray' s.add_dependency 'xray'
s.add_dependency 'logging', '~> 1.8' s.add_dependency 'logging', '~> 1.8'
s.add_dependency 'google-protobuf', '~> 3.0.0alpha.1.1' s.add_dependency 'google-protobuf', '~> 3.0.0alpha'
s.add_dependency 'minitest', '~> 5.4' # not a dev dependency, used by the interop tests s.add_dependency 'minitest', '~> 5.4' # reqd for interop tests
s.add_development_dependency "bundler", "~> 1.7" s.add_development_dependency 'bundler', '~> 1.7'
s.add_development_dependency "rake", "~> 10.0" s.add_development_dependency 'rake', '~> 10.0'
s.add_development_dependency 'rake-compiler', '~> 0' s.add_development_dependency 'rake-compiler', '~> 0'
s.add_development_dependency 'rspec', "~> 3.0" s.add_development_dependency 'rubocop', '~> 0.28.0'
s.add_development_dependency 'rspec', '~> 3.0'
s.extensions = %w[ext/grpc/extconf.rb] s.extensions = %w(ext/grpc/extconf.rb)
end end

@ -29,25 +29,21 @@
require 'beefcake' require 'beefcake'
# Re-open the beefcake message module to add a static encode
#
# This is a temporary measure while beefcake is used as the default proto
# library for developing grpc ruby. Once that changes to the official proto
# library this can be removed. It's necessary to allow the update the service
# module to assume a static encode method.
#
# TODO(temiola): remove me, once official code generation is available in protoc
module Beefcake module Beefcake
# Re-open the beefcake message module to add a static encode
#
# This is a temporary measure while beefcake is used as the default proto
# library for developing grpc ruby. Once that changes to the official proto
# library this can be removed. It's necessary to allow the update the service
# module to assume a static encode method.
# TODO(temiola): remove this.
module Message module Message
# additional mixin module that adds static encode method when include # additional mixin module that adds static encode method when include
module StaticEncode module StaticEncode
# encodes o with its instance#encode method # encodes o with its instance#encode method
def encode(o) def encode(o)
o.encode o.encode
end end
end end
# extend self.included in Beefcake::Message to include StaticEncode # extend self.included in Beefcake::Message to include StaticEncode
@ -57,6 +53,5 @@ module Beefcake
o.extend Decode o.extend Decode
o.send(:include, Encode) o.send(:include, Encode)
end end
end end
end end

@ -30,9 +30,12 @@
module Google module Google
module RPC module RPC
module Core module Core
class Event # Add an inspect method to C-defined Event class. # Event is a class defined in the c extension
#
# Here, we add an inspect method.
class Event
def inspect def inspect
'<%s: type:%s, tag:%s result:%s>' % [self.class, type, tag, result] "<#{self.class}: type:#{type}, tag:#{tag} result:#{result}>"
end end
end end
end end

@ -32,9 +32,10 @@ require 'grpc'
module Google module Google
module RPC module RPC
module Core module Core
# TimeConsts is a module from the C extension.
module TimeConsts # re-opens a module in the C extension. #
# Here it's re-opened to add a utility func.
module TimeConsts
# Converts a time delta to an absolute deadline. # Converts a time delta to an absolute deadline.
# #
# Assumes timeish is a relative time, and converts its to an absolute, # Assumes timeish is a relative time, and converts its to an absolute,
@ -48,24 +49,23 @@ module Google
# @param timeish [Number|TimeSpec] # @param timeish [Number|TimeSpec]
# @return timeish [Number|TimeSpec] # @return timeish [Number|TimeSpec]
def from_relative_time(timeish) def from_relative_time(timeish)
if timeish.is_a?TimeSpec if timeish.is_a? TimeSpec
timeish timeish
elsif timeish.nil? elsif timeish.nil?
TimeConsts::ZERO TimeConsts::ZERO
elsif !timeish.is_a?Numeric elsif !timeish.is_a? Numeric
raise TypeError('Cannot make an absolute deadline from %s', fail(TypeError,
timeish.inspect) "Cannot make an absolute deadline from #{timeish.inspect}")
elsif timeish < 0 elsif timeish < 0
TimeConsts::INFINITE_FUTURE TimeConsts::INFINITE_FUTURE
elsif timeish == 0 elsif timeish == 0
TimeConsts::ZERO TimeConsts::ZERO
else !timeish.nil? else
Time.now + timeish Time.now + timeish
end end
end end
module_function :from_relative_time module_function :from_relative_time
end end
end end
end end

@ -30,9 +30,8 @@
require 'grpc' require 'grpc'
module Google module Google
# Google::RPC contains the General RPC module.
module RPC module RPC
# OutOfTime is an exception class that indicates that an RPC exceeded its # OutOfTime is an exception class that indicates that an RPC exceeded its
# deadline. # deadline.
OutOfTime = Class.new(StandardError) OutOfTime = Class.new(StandardError)
@ -42,12 +41,11 @@ module Google
# error should be returned to the other end of a GRPC connection; when # error should be returned to the other end of a GRPC connection; when
# caught it means that this end received a status error. # caught it means that this end received a status error.
class BadStatus < StandardError class BadStatus < StandardError
attr_reader :code, :details attr_reader :code, :details
# @param code [Numeric] the status code # @param code [Numeric] the status code
# @param details [String] the details of the exception # @param details [String] the details of the exception
def initialize(code, details='unknown cause') def initialize(code, details = 'unknown cause')
super("#{code}:#{details}") super("#{code}:#{details}")
@code = code @code = code
@details = details @details = details
@ -60,9 +58,6 @@ module Google
def to_status def to_status
Status.new(code, details) Status.new(code, details)
end end
end end
end end
end end

@ -31,519 +31,516 @@ require 'forwardable'
require 'grpc/generic/bidi_call' require 'grpc/generic/bidi_call'
def assert_event_type(ev, want) def assert_event_type(ev, want)
raise OutOfTime if ev.nil? fail OutOfTime if ev.nil?
got = ev.type got = ev.type
raise 'Unexpected rpc event: got %s, want %s' % [got, want] unless got == want fail "Unexpected rpc event: got #{got}, want #{want}" unless got == want
end end
module Google::RPC module Google
# Google::RPC contains the General RPC module.
# The ActiveCall class provides simple methods for sending marshallable module RPC
# data to a call # The ActiveCall class provides simple methods for sending marshallable
class ActiveCall # data to a call
include Core::CompletionType class ActiveCall
include Core::StatusCodes include Core::CompletionType
include Core::TimeConsts include Core::StatusCodes
attr_reader(:deadline) include Core::TimeConsts
attr_reader(:deadline)
# client_start_invoke begins a client invocation.
# # client_start_invoke begins a client invocation.
# Flow Control note: this blocks until flow control accepts that client #
# request can go ahead. # Flow Control note: this blocks until flow control accepts that client
# # request can go ahead.
# deadline is the absolute deadline for the call. #
# # deadline is the absolute deadline for the call.
# == Keyword Arguments == #
# any keyword arguments are treated as metadata to be sent to the server # == Keyword Arguments ==
# if a keyword value is a list, multiple metadata for it's key are sent # any keyword arguments are treated as metadata to be sent to the server
# # if a keyword value is a list, multiple metadata for it's key are sent
# @param call [Call] a call on which to start and invocation #
# @param q [CompletionQueue] used to wait for INVOKE_ACCEPTED # @param call [Call] a call on which to start and invocation
# @param deadline [Fixnum,TimeSpec] the deadline for INVOKE_ACCEPTED # @param q [CompletionQueue] used to wait for INVOKE_ACCEPTED
def self.client_start_invoke(call, q, deadline, **kw) # @param deadline [Fixnum,TimeSpec] the deadline for INVOKE_ACCEPTED
raise ArgumentError.new('not a call') unless call.is_a?Core::Call def self.client_start_invoke(call, q, _deadline, **kw)
if !q.is_a?Core::CompletionQueue fail(ArgumentError, 'not a call') unless call.is_a? Core::Call
raise ArgumentError.new('not a CompletionQueue') unless q.is_a? Core::CompletionQueue
end fail(ArgumentError, 'not a CompletionQueue')
call.add_metadata(kw) if kw.length > 0 end
invoke_accepted, client_metadata_read = Object.new, Object.new call.add_metadata(kw) if kw.length > 0
finished_tag = Object.new invoke_accepted, client_metadata_read = Object.new, Object.new
call.start_invoke(q, invoke_accepted, client_metadata_read, finished_tag) finished_tag = Object.new
call.start_invoke(q, invoke_accepted, client_metadata_read,
finished_tag)
# wait for the invocation to be accepted
ev = q.pluck(invoke_accepted, INFINITE_FUTURE)
fail OutOfTime if ev.nil?
ev.close
# wait for the invocation to be accepted [finished_tag, client_metadata_read]
ev = q.pluck(invoke_accepted, INFINITE_FUTURE) end
raise OutOfTime if ev.nil?
ev.close
[finished_tag, client_metadata_read] # Creates an ActiveCall.
end #
# ActiveCall should only be created after a call is accepted. That means
# different things on a client and a server. On the client, the call is
# accepted after call.start_invoke followed by receipt of the
# corresponding INVOKE_ACCEPTED. on the server, this is after
# call.accept.
#
# #initialize cannot determine if the call is accepted or not; so if a
# call that's not accepted is used here, the error won't be visible until
# the ActiveCall methods are called.
#
# deadline is the absolute deadline for the call.
#
# @param call [Call] the call used by the ActiveCall
# @param q [CompletionQueue] the completion queue used to accept
# the call
# @param marshal [Function] f(obj)->string that marshal requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Fixnum] the deadline for the call to complete
# @param finished_tag [Object] the object used as the call's finish tag,
# if the call has begun
# @param read_metadata_tag [Object] the object used as the call's finish
# tag, if the call has begun
# @param started [true|false] indicates if the call has begun
def initialize(call, q, marshal, unmarshal, deadline, finished_tag: nil,
read_metadata_tag: nil, started: true)
fail(ArgumentError, 'not a call') unless call.is_a? Core::Call
unless q.is_a? Core::CompletionQueue
fail(ArgumentError, 'not a CompletionQueue')
end
@call = call
@cq = q
@deadline = deadline
@finished_tag = finished_tag
@read_metadata_tag = read_metadata_tag
@marshal = marshal
@started = started
@unmarshal = unmarshal
end
# Creates an ActiveCall. # Obtains the status of the call.
# #
# ActiveCall should only be created after a call is accepted. That means # this value is nil until the call completes
# different things on a client and a server. On the client, the call is # @return this call's status
# accepted after call.start_invoke followed by receipt of the def status
# corresponding INVOKE_ACCEPTED. on the server, this is after @call.status
# call.accept.
#
# #initialize cannot determine if the call is accepted or not; so if a
# call that's not accepted is used here, the error won't be visible until
# the ActiveCall methods are called.
#
# deadline is the absolute deadline for the call.
#
# @param call [Call] the call used by the ActiveCall
# @param q [CompletionQueue] the completion queue used to accept
# the call
# @param marshal [Function] f(obj)->string that marshal requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Fixnum] the deadline for the call to complete
# @param finished_tag [Object] the object used as the call's finish tag,
# if the call has begun
# @param read_metadata_tag [Object] the object used as the call's finish
# tag, if the call has begun
# @param started [true|false] (default true) indicates if the call has begun
def initialize(call, q, marshal, unmarshal, deadline, finished_tag: nil,
read_metadata_tag: nil, started: true)
raise ArgumentError.new('not a call') unless call.is_a?Core::Call
if !q.is_a?Core::CompletionQueue
raise ArgumentError.new('not a CompletionQueue')
end end
@call = call
@cq = q
@deadline = deadline
@finished_tag = finished_tag
@read_metadata_tag = read_metadata_tag
@marshal = marshal
@started = started
@unmarshal = unmarshal
end
# Obtains the status of the call. # Obtains the metadata of the call.
# #
# this value is nil until the call completes # At the start of the call this will be nil. During the call this gets
# @return this call's status # some values as soon as the other end of the connection acknowledges the
def status # request.
@call.status #
end # @return this calls's metadata
def metadata
@call.metadata
end
# Obtains the metadata of the call. # Cancels the call.
# #
# At the start of the call this will be nil. During the call this gets # Cancels the call. The call does not return any result, but once this it
# some values as soon as the other end of the connection acknowledges the # has been called, the call should eventually terminate. Due to potential
# request. # races between the execution of the cancel and the in-flight request, the
# # result of the call after calling #cancel is indeterminate:
# @return this calls's metadata #
def metadata # - the call may terminate with a BadStatus exception, with code=CANCELLED
@call.metadata # - the call may terminate with OK Status, and return a response
end # - the call may terminate with a different BadStatus exception if that
# was happening
def cancel
@call.cancel
end
# Cancels the call. # indicates if the call is shutdown
# def shutdown
# Cancels the call. The call does not return any result, but once this it @shutdown ||= false
# has been called, the call should eventually terminate. Due to potential end
# races between the execution of the cancel and the in-flight request, the
# result of the call after calling #cancel is indeterminate:
#
# - the call may terminate with a BadStatus exception, with code=CANCELLED
# - the call may terminate with OK Status, and return a response
# - the call may terminate with a different BadStatus exception if that was
# happening
def cancel
@call.cancel
end
# indicates if the call is shutdown # indicates if the call is cancelled.
def shutdown def cancelled
@shutdown ||= false @cancelled ||= false
end end
# indicates if the call is cancelled. # multi_req_view provides a restricted view of this ActiveCall for use
def cancelled # in a server client-streaming handler.
@cancelled ||= false def multi_req_view
end MultiReqView.new(self)
end
# multi_req_view provides a restricted view of this ActiveCall for use # single_req_view provides a restricted view of this ActiveCall for use in
# in a server client-streaming handler. # a server request-response handler.
def multi_req_view def single_req_view
MultiReqView.new(self) SingleReqView.new(self)
end end
# single_req_view provides a restricted view of this ActiveCall for use in # operation provides a restricted view of this ActiveCall for use as
# a server request-response handler. # a Operation.
def single_req_view def operation
SingleReqView.new(self) Operation.new(self)
end end
# operation provides a restricted view of this ActiveCall for use as # writes_done indicates that all writes are completed.
# a Operation. #
def operation # It blocks until the remote endpoint acknowledges by sending a FINISHED
Operation.new(self) # event, unless assert_finished is set to false. Any calls to
end # #remote_send after this call will fail.
#
# @param assert_finished [true, false] when true(default), waits for
# FINISHED.
def writes_done(assert_finished = true)
@call.writes_done(self)
ev = @cq.pluck(self, INFINITE_FUTURE)
begin
assert_event_type(ev, FINISH_ACCEPTED)
logger.debug("Writes done: waiting for finish? #{assert_finished}")
ensure
ev.close
end
# writes_done indicates that all writes are completed. return unless assert_finished
# ev = @cq.pluck(@finished_tag, INFINITE_FUTURE)
# It blocks until the remote endpoint acknowledges by sending a FINISHED fail 'unexpected nil event' if ev.nil?
# event, unless assert_finished is set to false. Any calls to
# #remote_send after this call will fail.
#
# @param assert_finished [true, false] when true(default), waits for
# FINISHED.
def writes_done(assert_finished=true)
@call.writes_done(self)
ev = @cq.pluck(self, INFINITE_FUTURE)
begin
assert_event_type(ev, FINISH_ACCEPTED)
logger.debug("Writes done: waiting for finish? #{assert_finished}")
ensure
ev.close ev.close
@call.status
end end
if assert_finished # finished waits until the call is completed.
#
# It blocks until the remote endpoint acknowledges by sending a FINISHED
# event.
def finished
ev = @cq.pluck(@finished_tag, INFINITE_FUTURE) ev = @cq.pluck(@finished_tag, INFINITE_FUTURE)
raise "unexpected event: #{ev.inspect}" if ev.nil? begin
ev.close fail "unexpected event: #{ev.inspect}" unless ev.type == FINISHED
return @call.status if @call.metadata.nil?
@call.metadata = ev.result.metadata
else
@call.metadata.merge!(ev.result.metadata)
end
if ev.result.code != Core::StatusCodes::OK
fail BadStatus.new(ev.result.code, ev.result.details)
end
res = ev.result
ensure
ev.close
end
res
end end
end
# finished waits until the call is completed. # remote_send sends a request to the remote endpoint.
# #
# It blocks until the remote endpoint acknowledges by sending a FINISHED # It blocks until the remote endpoint acknowledges by sending a
# event. # WRITE_ACCEPTED. req can be marshalled already.
def finished #
ev = @cq.pluck(@finished_tag, INFINITE_FUTURE) # @param req [Object, String] the object to send or it's marshal form.
begin # @param marshalled [false, true] indicates if the object is already
raise "unexpected event: #{ev.inspect}" unless ev.type == FINISHED # marshalled.
if @call.metadata.nil? def remote_send(req, marshalled = false)
@call.metadata = ev.result.metadata assert_queue_is_ready
logger.debug("sending #{req.inspect}, marshalled? #{marshalled}")
if marshalled
payload = req
else else
@call.metadata.merge!(ev.result.metadata) payload = @marshal.call(req)
end end
@call.start_write(Core::ByteBuffer.new(payload), self)
if ev.result.code != Core::StatusCodes::OK
raise BadStatus.new(ev.result.code, ev.result.details) # call queue#pluck, and wait for WRITE_ACCEPTED, so as not to return
# until the flow control allows another send on this call.
ev = @cq.pluck(self, INFINITE_FUTURE)
begin
assert_event_type(ev, WRITE_ACCEPTED)
ensure
ev.close
end end
res = ev.result
ensure
ev.close
end end
res
end
# remote_send sends a request to the remote endpoint. # send_status sends a status to the remote endpoint
# #
# It blocks until the remote endpoint acknowledges by sending a # @param code [int] the status code to send
# WRITE_ACCEPTED. req can be marshalled already. # @param details [String] details
# # @param assert_finished [true, false] when true(default), waits for
# @param req [Object, String] the object to send or it's marshal form. # FINISHED.
# @param marshalled [false, true] indicates if the object is already def send_status(code = OK, details = '', assert_finished = false)
# marshalled. assert_queue_is_ready
def remote_send(req, marshalled=false) @call.start_write_status(code, details, self)
assert_queue_is_ready ev = @cq.pluck(self, INFINITE_FUTURE)
logger.debug("sending payload #{req.inspect}, marshalled? #{marshalled}") begin
if marshalled assert_event_type(ev, FINISH_ACCEPTED)
payload = req ensure
else ev.close
payload = @marshal.call(req) end
end logger.debug("Status sent: #{code}:'#{details}'")
@call.start_write(Core::ByteBuffer.new(payload), self) return finished if assert_finished
nil
# call queue#pluck, and wait for WRITE_ACCEPTED, so as not to return
# until the flow control allows another send on this call.
ev = @cq.pluck(self, INFINITE_FUTURE)
begin
assert_event_type(ev, WRITE_ACCEPTED)
ensure
ev.close
end end
end
# send_status sends a status to the remote endpoint # remote_read reads a response from the remote endpoint.
# #
# @param code [int] the status code to send # It blocks until the remote endpoint sends a READ or FINISHED event. On
# @param details [String] details # a READ, it returns the response after unmarshalling it. On
# @param assert_finished [true, false] when true(default), waits for # FINISHED, it returns nil if the status is OK, otherwise raising
# FINISHED. # BadStatus
def send_status(code=OK, details='', assert_finished=false) def remote_read
assert_queue_is_ready if @call.metadata.nil? && !@read_metadata_tag.nil?
@call.start_write_status(code, details, self) ev = @cq.pluck(@read_metadata_tag, INFINITE_FUTURE)
ev = @cq.pluck(self, INFINITE_FUTURE) assert_event_type(ev, CLIENT_METADATA_READ)
begin @call.metadata = ev.result
assert_event_type(ev, FINISH_ACCEPTED) @read_metadata_tag = nil
ensure end
ev.close
end
logger.debug("Status sent: #{code}:'#{details}'")
if assert_finished
return finished
end
nil
end
# remote_read reads a response from the remote endpoint.
#
# It blocks until the remote endpoint sends a READ or FINISHED event. On
# a READ, it returns the response after unmarshalling it. On
# FINISHED, it returns nil if the status is OK, otherwise raising BadStatus
def remote_read
if @call.metadata.nil? && !@read_metadata_tag.nil?
ev = @cq.pluck(@read_metadata_tag, INFINITE_FUTURE)
assert_event_type(ev, CLIENT_METADATA_READ)
@call.metadata = ev.result
@read_metadata_tag = nil
end
@call.start_read(self) @call.start_read(self)
ev = @cq.pluck(self, INFINITE_FUTURE) ev = @cq.pluck(self, INFINITE_FUTURE)
begin begin
assert_event_type(ev, READ) assert_event_type(ev, READ)
logger.debug("received req: #{ev.result.inspect}") logger.debug("received req: #{ev.result.inspect}")
if !ev.result.nil? unless ev.result.nil?
logger.debug("received req.to_s: #{ev.result.to_s}") logger.debug("received req.to_s: #{ev.result}")
res = @unmarshal.call(ev.result.to_s) res = @unmarshal.call(ev.result.to_s)
logger.debug("received_req (unmarshalled): #{res.inspect}") logger.debug("received_req (unmarshalled): #{res.inspect}")
return res return res
end
ensure
ev.close
end end
ensure logger.debug('found nil; the final response has been sent')
ev.close nil
end end
logger.debug('found nil; the final response has been sent')
nil
end
# each_remote_read passes each response to the given block or returns an # each_remote_read passes each response to the given block or returns an
# enumerator the responses if no block is given. # enumerator the responses if no block is given.
# #
# == Enumerator == # == Enumerator ==
# #
# * #next blocks until the remote endpoint sends a READ or FINISHED # * #next blocks until the remote endpoint sends a READ or FINISHED
# * for each read, enumerator#next yields the response # * for each read, enumerator#next yields the response
# * on status # * on status
# * if it's is OK, enumerator#next raises StopException # * if it's is OK, enumerator#next raises StopException
# * if is not OK, enumerator#next raises RuntimeException # * if is not OK, enumerator#next raises RuntimeException
# #
# == Block == # == Block ==
# #
# * if provided it is executed for each response # * if provided it is executed for each response
# * the call blocks until no more responses are provided # * the call blocks until no more responses are provided
# #
# @return [Enumerator] if no block was given # @return [Enumerator] if no block was given
def each_remote_read def each_remote_read
return enum_for(:each_remote_read) if !block_given? return enum_for(:each_remote_read) unless block_given?
loop do loop do
resp = remote_read() resp = remote_read
break if resp.is_a?Struct::Status # is an OK status, bad statii raise break if resp.is_a? Struct::Status # is an OK status
break if resp.nil? # the last response was received break if resp.nil? # the last response was received
yield resp yield resp
end
end end
end
# each_remote_read_then_finish passes each response to the given block or # each_remote_read_then_finish passes each response to the given block or
# returns an enumerator of the responses if no block is given. # returns an enumerator of the responses if no block is given.
# #
# It is like each_remote_read, but it blocks on finishing on detecting # It is like each_remote_read, but it blocks on finishing on detecting
# the final message. # the final message.
# #
# == Enumerator == # == Enumerator ==
# #
# * #next blocks until the remote endpoint sends a READ or FINISHED # * #next blocks until the remote endpoint sends a READ or FINISHED
# * for each read, enumerator#next yields the response # * for each read, enumerator#next yields the response
# * on status # * on status
# * if it's is OK, enumerator#next raises StopException # * if it's is OK, enumerator#next raises StopException
# * if is not OK, enumerator#next raises RuntimeException # * if is not OK, enumerator#next raises RuntimeException
# #
# == Block == # == Block ==
# #
# * if provided it is executed for each response # * if provided it is executed for each response
# * the call blocks until no more responses are provided # * the call blocks until no more responses are provided
# #
# @return [Enumerator] if no block was given # @return [Enumerator] if no block was given
def each_remote_read_then_finish def each_remote_read_then_finish
return enum_for(:each_remote_read_then_finish) if !block_given? return enum_for(:each_remote_read_then_finish) unless block_given?
loop do loop do
resp = remote_read resp = remote_read
break if resp.is_a?Struct::Status # is an OK status, bad statii raise break if resp.is_a? Struct::Status # is an OK status
if resp.nil? # the last response was received, but not finished yet if resp.nil? # the last response was received, but not finished yet
finished finished
break break
end
yield resp
end end
yield resp
end end
end
# request_response sends a request to a GRPC server, and returns the # request_response sends a request to a GRPC server, and returns the
# response. # response.
# #
# == Keyword Arguments == # == Keyword Arguments ==
# any keyword arguments are treated as metadata to be sent to the server # any keyword arguments are treated as metadata to be sent to the server
# if a keyword value is a list, multiple metadata for it's key are sent # if a keyword value is a list, multiple metadata for it's key are sent
# #
# @param req [Object] the request sent to the server # @param req [Object] the request sent to the server
# @return [Object] the response received from the server # @return [Object] the response received from the server
def request_response(req, **kw) def request_response(req, **kw)
start_call(**kw) unless @started start_call(**kw) unless @started
remote_send(req) remote_send(req)
writes_done(false) writes_done(false)
response = remote_read response = remote_read
if !response.is_a?(Struct::Status) # finish if status not yet received finished unless response.is_a? Struct::Status
finished response
end end
response
end
# client_streamer sends a stream of requests to a GRPC server, and # client_streamer sends a stream of requests to a GRPC server, and
# returns a single response. # returns a single response.
# #
# requests provides an 'iterable' of Requests. I.e. it follows Ruby's # requests provides an 'iterable' of Requests. I.e. it follows Ruby's
# #each enumeration protocol. In the simplest case, requests will be an # #each enumeration protocol. In the simplest case, requests will be an
# array of marshallable objects; in typical case it will be an Enumerable # array of marshallable objects; in typical case it will be an Enumerable
# that allows dynamic construction of the marshallable objects. # that allows dynamic construction of the marshallable objects.
# #
# == Keyword Arguments == # == Keyword Arguments ==
# any keyword arguments are treated as metadata to be sent to the server # any keyword arguments are treated as metadata to be sent to the server
# if a keyword value is a list, multiple metadata for it's key are sent # if a keyword value is a list, multiple metadata for it's key are sent
# #
# @param requests [Object] an Enumerable of requests to send # @param requests [Object] an Enumerable of requests to send
# @return [Object] the response received from the server # @return [Object] the response received from the server
def client_streamer(requests, **kw) def client_streamer(requests, **kw)
start_call(**kw) unless @started start_call(**kw) unless @started
requests.each { |r| remote_send(r) } requests.each { |r| remote_send(r) }
writes_done(false) writes_done(false)
response = remote_read response = remote_read
if !response.is_a?(Struct::Status) # finish if status not yet received finished unless response.is_a? Struct::Status
finished response
end end
response
end
# server_streamer sends one request to the GRPC server, which yields a # server_streamer sends one request to the GRPC server, which yields a
# stream of responses. # stream of responses.
# #
# responses provides an enumerator over the streamed responses, i.e. it # responses provides an enumerator over the streamed responses, i.e. it
# follows Ruby's #each iteration protocol. The enumerator blocks while # follows Ruby's #each iteration protocol. The enumerator blocks while
# waiting for each response, stops when the server signals that no # waiting for each response, stops when the server signals that no
# further responses will be supplied. If the implicit block is provided, # further responses will be supplied. If the implicit block is provided,
# it is executed with each response as the argument and no result is # it is executed with each response as the argument and no result is
# returned. # returned.
# #
# == Keyword Arguments == # == Keyword Arguments ==
# any keyword arguments are treated as metadata to be sent to the server # any keyword arguments are treated as metadata to be sent to the server
# if a keyword value is a list, multiple metadata for it's key are sent # if a keyword value is a list, multiple metadata for it's key are sent
# any keyword arguments are treated as metadata to be sent to the server. # any keyword arguments are treated as metadata to be sent to the server.
# #
# @param req [Object] the request sent to the server # @param req [Object] the request sent to the server
# @return [Enumerator|nil] a response Enumerator # @return [Enumerator|nil] a response Enumerator
def server_streamer(req, **kw) def server_streamer(req, **kw)
start_call(**kw) unless @started start_call(**kw) unless @started
remote_send(req) remote_send(req)
writes_done(false) writes_done(false)
replies = enum_for(:each_remote_read_then_finish) replies = enum_for(:each_remote_read_then_finish)
return replies if !block_given? return replies unless block_given?
replies.each { |r| yield r } replies.each { |r| yield r }
end end
# bidi_streamer sends a stream of requests to the GRPC server, and yields # bidi_streamer sends a stream of requests to the GRPC server, and yields
# a stream of responses. # a stream of responses.
# #
# This method takes an Enumerable of requests, and returns and enumerable # This method takes an Enumerable of requests, and returns and enumerable
# of responses. # of responses.
# #
# == requests == # == requests ==
# #
# requests provides an 'iterable' of Requests. I.e. it follows Ruby's #each # requests provides an 'iterable' of Requests. I.e. it follows Ruby's
# enumeration protocol. In the simplest case, requests will be an array of # #each enumeration protocol. In the simplest case, requests will be an
# marshallable objects; in typical case it will be an Enumerable that # array of marshallable objects; in typical case it will be an
# allows dynamic construction of the marshallable objects. # Enumerable that allows dynamic construction of the marshallable
# # objects.
# == responses == #
# # == responses ==
# This is an enumerator of responses. I.e, its #next method blocks #
# waiting for the next response. Also, if at any point the block needs # This is an enumerator of responses. I.e, its #next method blocks
# to consume all the remaining responses, this can be done using #each or # waiting for the next response. Also, if at any point the block needs
# #collect. Calling #each or #collect should only be done if # to consume all the remaining responses, this can be done using #each or
# the_call#writes_done has been called, otherwise the block will loop # #collect. Calling #each or #collect should only be done if
# forever. # the_call#writes_done has been called, otherwise the block will loop
# # forever.
# == Keyword Arguments == #
# any keyword arguments are treated as metadata to be sent to the server # == Keyword Arguments ==
# if a keyword value is a list, multiple metadata for it's key are sent # any keyword arguments are treated as metadata to be sent to the server
# # if a keyword value is a list, multiple metadata for it's key are sent
# @param requests [Object] an Enumerable of requests to send #
# @return [Enumerator, nil] a response Enumerator # @param requests [Object] an Enumerable of requests to send
def bidi_streamer(requests, **kw, &blk) # @return [Enumerator, nil] a response Enumerator
start_call(**kw) unless @started def bidi_streamer(requests, **kw, &blk)
bd = BidiCall.new(@call, @cq, @marshal, @unmarshal, @deadline, start_call(**kw) unless @started
@finished_tag) bd = BidiCall.new(@call, @cq, @marshal, @unmarshal, @deadline,
bd.run_on_client(requests, &blk) @finished_tag)
end bd.run_on_client(requests, &blk)
end
# run_server_bidi orchestrates a BiDi stream processing on a server. # run_server_bidi orchestrates a BiDi stream processing on a server.
# #
# N.B. gen_each_reply is a func(Enumerable<Requests>) # N.B. gen_each_reply is a func(Enumerable<Requests>)
# #
# It takes an enumerable of requests as an arg, in case there is a # It takes an enumerable of requests as an arg, in case there is a
# relationship between the stream of requests and the stream of replies. # relationship between the stream of requests and the stream of replies.
# #
# This does not mean that must necessarily be one. E.g, the replies # This does not mean that must necessarily be one. E.g, the replies
# produced by gen_each_reply could ignore the received_msgs # produced by gen_each_reply could ignore the received_msgs
# #
# @param gen_each_reply [Proc] generates the BiDi stream replies # @param gen_each_reply [Proc] generates the BiDi stream replies
def run_server_bidi(gen_each_reply) def run_server_bidi(gen_each_reply)
bd = BidiCall.new(@call, @cq, @marshal, @unmarshal, @deadline, bd = BidiCall.new(@call, @cq, @marshal, @unmarshal, @deadline,
@finished_tag) @finished_tag)
bd.run_on_server(gen_each_reply) bd.run_on_server(gen_each_reply)
end end
private private
def start_call(**kw) def start_call(**kw)
tags = ActiveCall.client_start_invoke(@call, @cq, @deadline, **kw) tags = ActiveCall.client_start_invoke(@call, @cq, @deadline, **kw)
@finished_tag, @read_metadata_tag = tags @finished_tag, @read_metadata_tag = tags
@started = true @started = true
end end
def self.view_class(*visible_methods) def self.view_class(*visible_methods)
Class.new do Class.new do
extend ::Forwardable extend ::Forwardable
def_delegators :@wrapped, *visible_methods def_delegators :@wrapped, *visible_methods
# @param wrapped [ActiveCall] the call whose methods are shielded # @param wrapped [ActiveCall] the call whose methods are shielded
def initialize(wrapped) def initialize(wrapped)
@wrapped = wrapped @wrapped = wrapped
end
end end
end end
end
# SingleReqView limits access to an ActiveCall's methods for use in server # SingleReqView limits access to an ActiveCall's methods for use in server
# handlers that receive just one request. # handlers that receive just one request.
SingleReqView = view_class(:cancelled, :deadline) SingleReqView = view_class(:cancelled, :deadline)
# MultiReqView limits access to an ActiveCall's methods for use in # MultiReqView limits access to an ActiveCall's methods for use in
# server client_streamer handlers. # server client_streamer handlers.
MultiReqView = view_class(:cancelled, :deadline, :each_queued_msg, MultiReqView = view_class(:cancelled, :deadline, :each_queued_msg,
:each_remote_read) :each_remote_read)
# Operation limits access to an ActiveCall's methods for use as # Operation limits access to an ActiveCall's methods for use as
# a Operation on the client. # a Operation on the client.
Operation = view_class(:cancel, :cancelled, :deadline, :execute, :metadata, Operation = view_class(:cancel, :cancelled, :deadline, :execute,
:status) :metadata, :status)
# confirms that no events are enqueued, and that the queue is not # confirms that no events are enqueued, and that the queue is not
# shutdown. # shutdown.
def assert_queue_is_ready def assert_queue_is_ready
ev = nil ev = nil
begin begin
ev = @cq.pluck(self, ZERO) ev = @cq.pluck(self, ZERO)
raise "unexpected event #{ev.inspect}" unless ev.nil? fail "unexpected event #{ev.inspect}" unless ev.nil?
rescue OutOfTime rescue OutOfTime
# expected, nothing should be on the queue and the deadline was ZERO, logging.debug('timed out waiting for next event')
# except things using another tag # expected, nothing should be on the queue and the deadline was ZERO,
ensure # except things using another tag
ev.close unless ev.nil? ensure
ev.close unless ev.nil?
end
end end
end end
end end
end end

@ -31,194 +31,195 @@ require 'forwardable'
require 'grpc/grpc' require 'grpc/grpc'
def assert_event_type(ev, want) def assert_event_type(ev, want)
raise OutOfTime if ev.nil? fail OutOfTime if ev.nil?
got = ev.type got = ev.type
raise 'Unexpected rpc event: got %s, want %s' % [got, want] unless got == want fail("Unexpected rpc event: got #{got}, want #{want}") unless got == want
end end
module Google::RPC module Google
# Google::RPC contains the General RPC module.
# The BiDiCall class orchestrates exection of a BiDi stream on a client or module RPC
# server. # The BiDiCall class orchestrates exection of a BiDi stream on a client or
class BidiCall # server.
include Core::CompletionType class BidiCall
include Core::StatusCodes include Core::CompletionType
include Core::TimeConsts include Core::StatusCodes
include Core::TimeConsts
# Creates a BidiCall.
# # Creates a BidiCall.
# BidiCall should only be created after a call is accepted. That means #
# different things on a client and a server. On the client, the call is # BidiCall should only be created after a call is accepted. That means
# accepted after call.start_invoke followed by receipt of the corresponding # different things on a client and a server. On the client, the call is
# INVOKE_ACCEPTED. On the server, this is after call.accept. # accepted after call.start_invoke followed by receipt of the
# # corresponding INVOKE_ACCEPTED. On the server, this is after
# #initialize cannot determine if the call is accepted or not; so if a # call.accept.
# call that's not accepted is used here, the error won't be visible until #
# the BidiCall#run is called. # #initialize cannot determine if the call is accepted or not; so if a
# # call that's not accepted is used here, the error won't be visible until
# deadline is the absolute deadline for the call. # the BidiCall#run is called.
# #
# @param call [Call] the call used by the ActiveCall # deadline is the absolute deadline for the call.
# @param q [CompletionQueue] the completion queue used to accept #
# the call # @param call [Call] the call used by the ActiveCall
# @param marshal [Function] f(obj)->string that marshal requests # @param q [CompletionQueue] the completion queue used to accept
# @param unmarshal [Function] f(string)->obj that unmarshals responses # the call
# @param deadline [Fixnum] the deadline for the call to complete # @param marshal [Function] f(obj)->string that marshal requests
# @param finished_tag [Object] the object used as the call's finish tag, # @param unmarshal [Function] f(string)->obj that unmarshals responses
def initialize(call, q, marshal, unmarshal, deadline, finished_tag) # @param deadline [Fixnum] the deadline for the call to complete
raise ArgumentError.new('not a call') unless call.is_a?Core::Call # @param finished_tag [Object] the object used as the call's finish tag,
if !q.is_a?Core::CompletionQueue def initialize(call, q, marshal, unmarshal, deadline, finished_tag)
raise ArgumentError.new('not a CompletionQueue') fail(ArgumentError, 'not a call') unless call.is_a? Core::Call
unless q.is_a? Core::CompletionQueue
fail(ArgumentError, 'not a CompletionQueue')
end
@call = call
@cq = q
@deadline = deadline
@finished_tag = finished_tag
@marshal = marshal
@readq = Queue.new
@unmarshal = unmarshal
end end
@call = call
@cq = q
@deadline = deadline
@finished_tag = finished_tag
@marshal = marshal
@readq = Queue.new
@unmarshal = unmarshal
end
# Begins orchestration of the Bidi stream for a client sending requests. # Begins orchestration of the Bidi stream for a client sending requests.
# #
# The method either returns an Enumerator of the responses, or accepts a # The method either returns an Enumerator of the responses, or accepts a
# block that can be invoked with each response. # block that can be invoked with each response.
# #
# @param requests the Enumerable of requests to send # @param requests the Enumerable of requests to send
# @return an Enumerator of requests to yield # @return an Enumerator of requests to yield
def run_on_client(requests, &blk) def run_on_client(requests, &blk)
enq_th = start_write_loop(requests) enq_th = start_write_loop(requests)
loop_th = start_read_loop loop_th = start_read_loop
replies = each_queued_msg replies = each_queued_msg
return replies if blk.nil? return replies if blk.nil?
replies.each { |r| blk.call(r) } replies.each { |r| blk.call(r) }
enq_th.join enq_th.join
loop_th.join loop_th.join
end end
# Begins orchestration of the Bidi stream for a server generating replies.
#
# N.B. gen_each_reply is a func(Enumerable<Requests>)
#
# It takes an enumerable of requests as an arg, in case there is a
# relationship between the stream of requests and the stream of replies.
#
# This does not mean that must necessarily be one. E.g, the replies
# produced by gen_each_reply could ignore the received_msgs
#
# @param gen_each_reply [Proc] generates the BiDi stream replies.
def run_on_server(gen_each_reply)
replys = gen_each_reply.call(each_queued_msg)
enq_th = start_write_loop(replys, is_client:false)
loop_th = start_read_loop()
loop_th.join
enq_th.join
end
private
END_OF_READS = :end_of_reads # Begins orchestration of the Bidi stream for a server generating replies.
END_OF_WRITES = :end_of_writes #
# N.B. gen_each_reply is a func(Enumerable<Requests>)
#
# It takes an enumerable of requests as an arg, in case there is a
# relationship between the stream of requests and the stream of replies.
#
# This does not mean that must necessarily be one. E.g, the replies
# produced by gen_each_reply could ignore the received_msgs
#
# @param gen_each_reply [Proc] generates the BiDi stream replies.
def run_on_server(gen_each_reply)
replys = gen_each_reply.call(each_queued_msg)
enq_th = start_write_loop(replys, is_client: false)
loop_th = start_read_loop
loop_th.join
enq_th.join
end
# each_queued_msg yields each message on this instances readq private
#
# - messages are added to the readq by #read_loop END_OF_READS = :end_of_reads
# - iteration ends when the instance itself is added END_OF_WRITES = :end_of_writes
def each_queued_msg
return enum_for(:each_queued_msg) if !block_given? # each_queued_msg yields each message on this instances readq
count = 0 #
loop do # - messages are added to the readq by #read_loop
logger.debug("each_queued_msg: msg##{count}") # - iteration ends when the instance itself is added
count += 1 def each_queued_msg
req = @readq.pop return enum_for(:each_queued_msg) unless block_given?
throw req if req.is_a?StandardError count = 0
break if req.equal?(END_OF_READS) loop do
yield req logger.debug("each_queued_msg: msg##{count}")
count += 1
req = @readq.pop
throw req if req.is_a? StandardError
break if req.equal?(END_OF_READS)
yield req
end
end end
end
# during bidi-streaming, read the requests to send from a separate thread # during bidi-streaming, read the requests to send from a separate thread
# read so that read_loop does not block waiting for requests to read. # read so that read_loop does not block waiting for requests to read.
def start_write_loop(requests, is_client: true) def start_write_loop(requests, is_client: true)
Thread.new do # TODO(temiola) run on a thread pool Thread.new do # TODO(temiola) run on a thread pool
write_tag = Object.new write_tag = Object.new
begin begin
count = 0 count = 0
requests.each do |req| requests.each do |req|
count += 1 count += 1
payload = @marshal.call(req) payload = @marshal.call(req)
@call.start_write(Core::ByteBuffer.new(payload), write_tag) @call.start_write(Core::ByteBuffer.new(payload), write_tag)
ev = @cq.pluck(write_tag, INFINITE_FUTURE) ev = @cq.pluck(write_tag, INFINITE_FUTURE)
begin begin
assert_event_type(ev, WRITE_ACCEPTED) assert_event_type(ev, WRITE_ACCEPTED)
ensure ensure
ev.close ev.close
end end
end
if is_client
@call.writes_done(write_tag)
ev = @cq.pluck(write_tag, INFINITE_FUTURE)
begin
assert_event_type(ev, FINISH_ACCEPTED)
ensure
ev.close
end end
logger.debug("bidi-client: sent #{count} reqs, waiting to finish") if is_client
ev = @cq.pluck(@finished_tag, INFINITE_FUTURE) @call.writes_done(write_tag)
begin ev = @cq.pluck(write_tag, INFINITE_FUTURE)
assert_event_type(ev, FINISHED) begin
ensure assert_event_type(ev, FINISH_ACCEPTED)
ev.close ensure
ev.close
end
logger.debug("bidi-client: sent #{count} reqs, waiting to finish")
ev = @cq.pluck(@finished_tag, INFINITE_FUTURE)
begin
assert_event_type(ev, FINISHED)
ensure
ev.close
end
logger.debug('bidi-client: finished received')
end end
logger.debug('bidi-client: finished received') rescue StandardError => e
logger.warn('bidi: write_loop failed')
logger.warn(e)
end end
rescue StandardError => e
logger.warn('bidi: write_loop failed')
logger.warn(e)
end end
end end
end
# starts the read loop
def start_read_loop()
t = Thread.new do
begin
read_tag = Object.new
count = 0
# queue the initial read before beginning the loop # starts the read loop
loop do def start_read_loop
logger.debug("waiting for read #{count}") Thread.new do
count += 1 begin
@call.start_read(read_tag) read_tag = Object.new
ev = @cq.pluck(read_tag, INFINITE_FUTURE) count = 0
begin
assert_event_type(ev, READ) # queue the initial read before beginning the loop
loop do
# handle the next event. logger.debug("waiting for read #{count}")
if ev.result.nil? count += 1
@readq.push(END_OF_READS) @call.start_read(read_tag)
logger.debug('done reading!') ev = @cq.pluck(read_tag, INFINITE_FUTURE)
break begin
assert_event_type(ev, READ)
# handle the next event.
if ev.result.nil?
@readq.push(END_OF_READS)
logger.debug('done reading!')
break
end
# push the latest read onto the queue and continue reading
logger.debug("received req: #{ev.result}")
res = @unmarshal.call(ev.result.to_s)
@readq.push(res)
ensure
ev.close
end end
# push the latest read onto the queue and continue reading
logger.debug("received req.to_s: #{ev.result.to_s}")
res = @unmarshal.call(ev.result.to_s)
@readq.push(res)
ensure
ev.close
end end
end
rescue StandardError => e rescue StandardError => e
logger.warn('bidi: read_loop failed') logger.warn('bidi: read_loop failed')
logger.warn(e) logger.warn(e)
@readq.push(e) # let each_queued_msg terminate with this error @readq.push(e) # let each_queued_msg terminate with this error
end
end end
end end
end end
end end
end end

@ -30,377 +30,381 @@
require 'grpc/generic/active_call' require 'grpc/generic/active_call'
require 'xray/thread_dump_signal_handler' require 'xray/thread_dump_signal_handler'
module Google::RPC module Google
# Google::RPC contains the General RPC module.
module RPC
# ClientStub represents an endpoint used to send requests to GRPC servers.
class ClientStub
include Core::StatusCodes
# ClientStub represents an endpoint used to send requests to GRPC servers. # Default deadline is 5 seconds.
class ClientStub DEFAULT_DEADLINE = 5
include Core::StatusCodes
# Default deadline is 5 seconds. # Creates a new ClientStub.
DEFAULT_DEADLINE = 5 #
# Minimally, a stub is created with the just the host of the gRPC service
# Creates a new ClientStub. # it wishes to access, e.g.,
# #
# Minimally, a stub is created with the just the host of the gRPC service # my_stub = ClientStub.new(example.host.com:50505)
# it wishes to access, e.g., #
# # Any arbitrary keyword arguments are treated as channel arguments used to
# my_stub = ClientStub.new(example.host.com:50505) # configure the RPC connection to the host.
# #
# Any arbitrary keyword arguments are treated as channel arguments used to # There are some specific keyword args that are not used to configure the
# configure the RPC connection to the host. # channel:
# #
# There are some specific keyword args that are not used to configure the # - :channel_override
# channel: # when present, this must be a pre-created GRPC::Channel. If it's
# # present the host and arbitrary keyword arg areignored, and the RPC
# - :channel_override # connection uses this channel.
# when present, this must be a pre-created GRPC::Channel. If it's present #
# the host and arbitrary keyword arg areignored, and the RPC connection uses # - :deadline
# this channel. # when present, this is the default deadline used for calls
# #
# - :deadline # - :update_metadata
# when present, this is the default deadline used for calls # when present, this a func that takes a hash and returns a hash
# # it can be used to update metadata, i.e, remove, change or update
# - :update_metadata # amend metadata values.
# when present, this a func that takes a hash and returns a hash #
# it can be used to update metadata, i.e, remove, change or update # @param host [String] the host the stub connects to
# amend metadata values. # @param q [Core::CompletionQueue] used to wait for events
# # @param channel_override [Core::Channel] a pre-created channel
# @param host [String] the host the stub connects to # @param deadline [Number] the default deadline to use in requests
# @param q [Core::CompletionQueue] used to wait for events # @param creds [Core::Credentials] the channel
# @param channel_override [Core::Channel] a pre-created channel # @param update_metadata a func that updates metadata as described above
# @param deadline [Number] the default deadline to use in requests # @param kw [KeywordArgs]the channel arguments
# @param creds [Core::Credentials] secures and/or authenticates the channel def initialize(host, q,
# @param update_metadata a func that updates metadata as described above channel_override:nil,
# @param kw [KeywordArgs]the channel arguments deadline: DEFAULT_DEADLINE,
def initialize(host, q, creds: nil,
channel_override:nil, update_metadata: nil,
deadline: DEFAULT_DEADLINE, **kw)
creds: nil, unless q.is_a? Core::CompletionQueue
update_metadata: nil, fail(ArgumentError, 'not a CompletionQueue')
**kw) end
if !q.is_a?Core::CompletionQueue @queue = q
raise ArgumentError.new('not a CompletionQueue')
end
@queue = q
# set the channel instance # set the channel instance
if !channel_override.nil? if !channel_override.nil?
ch = channel_override ch = channel_override
raise ArgumentError.new('not a Channel') unless ch.is_a?(Core::Channel) fail(ArgumentError, 'not a Channel') unless ch.is_a? Core::Channel
elsif creds.nil? else
ch = Core::Channel.new(host, kw) if creds.nil?
elsif !creds.is_a?(Core::Credentials) ch = Core::Channel.new(host, kw)
raise ArgumentError.new('not a Credentials') elsif !creds.is_a?(Core::Credentials)
else fail(ArgumentError, 'not a Credentials')
ch = Core::Channel.new(host, kw, creds) else
end ch = Core::Channel.new(host, kw, creds)
@ch = ch end
end
@ch = ch
@update_metadata = nil @update_metadata = nil
if !update_metadata.nil? unless update_metadata.nil?
if !update_metadata.is_a?(Proc) unless update_metadata.is_a? Proc
raise ArgumentError.new('update_metadata is not a Proc') fail(ArgumentError, 'update_metadata is not a Proc')
end
@update_metadata = update_metadata
end end
@update_metadata = update_metadata
@host = host
@deadline = deadline
end end
# request_response sends a request to a GRPC server, and returns the
# response.
#
# == Flow Control ==
# This is a blocking call.
#
# * it does not return until a response is received.
#
# * the requests is sent only when GRPC core's flow control allows it to
# be sent.
#
# == Errors ==
# An RuntimeError is raised if
#
# * the server responds with a non-OK status
#
# * the deadline is exceeded
#
# == Return Value ==
#
# If return_op is false, the call returns the response
#
# If return_op is true, the call returns an Operation, calling execute
# on the Operation returns the response.
#
# == Keyword Args ==
#
# Unspecified keyword arguments are treated as metadata to be sent to the
# server.
#
# @param method [String] the RPC method to call on the GRPC server
# @param req [Object] the request sent to the server
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Numeric] (optional) the max completion time in seconds
# @param return_op [true|false] return an Operation if true
# @return [Object] the response received from the server
def request_response(method, req, marshal, unmarshal, deadline = nil,
return_op: false, **kw)
c = new_active_call(method, marshal, unmarshal, deadline || @deadline)
md = @update_metadata.nil? ? kw : @update_metadata.call(kw.clone)
return c.request_response(req, **md) unless return_op
@host = host # return the operation view of the active_call; define #execute as a
@deadline = deadline # new method for this instance that invokes #request_response.
end op = c.operation
op.define_singleton_method(:execute) do
c.request_response(req, **md)
end
op
end
# request_response sends a request to a GRPC server, and returns the # client_streamer sends a stream of requests to a GRPC server, and
# response. # returns a single response.
# #
# == Flow Control == # requests provides an 'iterable' of Requests. I.e. it follows Ruby's
# This is a blocking call. # #each enumeration protocol. In the simplest case, requests will be an
# # array of marshallable objects; in typical case it will be an Enumerable
# * it does not return until a response is received. # that allows dynamic construction of the marshallable objects.
# #
# * the requests is sent only when GRPC core's flow control allows it to # == Flow Control ==
# be sent. # This is a blocking call.
# #
# == Errors == # * it does not return until a response is received.
# An RuntimeError is raised if #
# # * each requests is sent only when GRPC core's flow control allows it to
# * the server responds with a non-OK status # be sent.
# #
# * the deadline is exceeded # == Errors ==
# # An RuntimeError is raised if
# == Return Value == #
# # * the server responds with a non-OK status
# If return_op is false, the call returns the response #
# # * the deadline is exceeded
# If return_op is true, the call returns an Operation, calling execute #
# on the Operation returns the response. # == Return Value ==
# #
# == Keyword Args == # If return_op is false, the call consumes the requests and returns
# # the response.
# Unspecified keyword arguments are treated as metadata to be sent to the #
# server. # If return_op is true, the call returns the response.
# #
# @param method [String] the RPC method to call on the GRPC server # == Keyword Args ==
# @param req [Object] the request sent to the server #
# @param marshal [Function] f(obj)->string that marshals requests # Unspecified keyword arguments are treated as metadata to be sent to the
# @param unmarshal [Function] f(string)->obj that unmarshals responses # server.
# @param deadline [Numeric] (optional) the max completion time in seconds #
# @param return_op [true|false] (default false) return an Operation if true # @param method [String] the RPC method to call on the GRPC server
# @return [Object] the response received from the server # @param requests [Object] an Enumerable of requests to send
def request_response(method, req, marshal, unmarshal, deadline=nil, # @param marshal [Function] f(obj)->string that marshals requests
return_op:false, **kw) # @param unmarshal [Function] f(string)->obj that unmarshals responses
c = new_active_call(method, marshal, unmarshal, deadline || @deadline) # @param deadline [Numeric] the max completion time in seconds
md = @update_metadata.nil? ? kw : @update_metadata.call(kw.clone) # @param return_op [true|false] return an Operation if true
return c.request_response(req, **md) unless return_op # @return [Object|Operation] the response received from the server
def client_streamer(method, requests, marshal, unmarshal, deadline = nil,
return_op: false, **kw)
c = new_active_call(method, marshal, unmarshal, deadline || @deadline)
md = @update_metadata.nil? ? kw : @update_metadata.call(kw.clone)
return c.client_streamer(requests, **md) unless return_op
# return the operation view of the active_call; define #execute as a # return the operation view of the active_call; define #execute as a
# new method for this instance that invokes #request_response. # new method for this instance that invokes #client_streamer.
op = c.operation op = c.operation
op.define_singleton_method(:execute) do op.define_singleton_method(:execute) do
c.request_response(req, **md) c.client_streamer(requests, **md)
end
op
end end
op
end
# client_streamer sends a stream of requests to a GRPC server, and # server_streamer sends one request to the GRPC server, which yields a
# returns a single response. # stream of responses.
# #
# requests provides an 'iterable' of Requests. I.e. it follows Ruby's # responses provides an enumerator over the streamed responses, i.e. it
# #each enumeration protocol. In the simplest case, requests will be an # follows Ruby's #each iteration protocol. The enumerator blocks while
# array of marshallable objects; in typical case it will be an Enumerable # waiting for each response, stops when the server signals that no
# that allows dynamic construction of the marshallable objects. # further responses will be supplied. If the implicit block is provided,
# # it is executed with each response as the argument and no result is
# == Flow Control == # returned.
# This is a blocking call. #
# # == Flow Control ==
# * it does not return until a response is received. # This is a blocking call.
# #
# * each requests is sent only when GRPC core's flow control allows it to # * the request is sent only when GRPC core's flow control allows it to
# be sent. # be sent.
# #
# == Errors == # * the request will not complete until the server sends the final
# An RuntimeError is raised if # response followed by a status message.
# #
# * the server responds with a non-OK status # == Errors ==
# # An RuntimeError is raised if
# * the deadline is exceeded #
# # * the server responds with a non-OK status when any response is
# == Return Value == # * retrieved
# #
# If return_op is false, the call consumes the requests and returns # * the deadline is exceeded
# the response. #
# # == Return Value ==
# If return_op is true, the call returns the response. #
# # if the return_op is false, the return value is an Enumerator of the
# == Keyword Args == # results, unless a block is provided, in which case the block is
# # executed with each response.
# Unspecified keyword arguments are treated as metadata to be sent to the #
# server. # if return_op is true, the function returns an Operation whose #execute
# # method runs server streamer call. Again, Operation#execute either
# @param method [String] the RPC method to call on the GRPC server # calls the given block with each response or returns an Enumerator of the
# @param requests [Object] an Enumerable of requests to send # responses.
# @param marshal [Function] f(obj)->string that marshals requests #
# @param unmarshal [Function] f(string)->obj that unmarshals responses # == Keyword Args ==
# @param deadline [Numeric] the max completion time in seconds #
# @param return_op [true|false] (default false) return an Operation if true # Unspecified keyword arguments are treated as metadata to be sent to the
# @return [Object|Operation] the response received from the server # server.
def client_streamer(method, requests, marshal, unmarshal, deadline=nil, #
return_op:false, **kw) # @param method [String] the RPC method to call on the GRPC server
c = new_active_call(method, marshal, unmarshal, deadline || @deadline) # @param req [Object] the request sent to the server
md = @update_metadata.nil? ? kw : @update_metadata.call(kw.clone) # @param marshal [Function] f(obj)->string that marshals requests
return c.client_streamer(requests, **md) unless return_op # @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Numeric] the max completion time in seconds
# @param return_op [true|false]return an Operation if true
# @param blk [Block] when provided, is executed for each response
# @return [Enumerator|Operation|nil] as discussed above
def server_streamer(method, req, marshal, unmarshal, deadline = nil,
return_op: false, **kw, &blk)
c = new_active_call(method, marshal, unmarshal, deadline || @deadline)
md = @update_metadata.nil? ? kw : @update_metadata.call(kw.clone)
return c.server_streamer(req, **md, &blk) unless return_op
# return the operation view of the active_call; define #execute as a # return the operation view of the active_call; define #execute
# new method for this instance that invokes #client_streamer. # as a new method for this instance that invokes #server_streamer
op = c.operation op = c.operation
op.define_singleton_method(:execute) do op.define_singleton_method(:execute) do
c.client_streamer(requests, **md) c.server_streamer(req, **md, &blk)
end
op
end end
op
end
# server_streamer sends one request to the GRPC server, which yields a # bidi_streamer sends a stream of requests to the GRPC server, and yields
# stream of responses. # a stream of responses.
# #
# responses provides an enumerator over the streamed responses, i.e. it # This method takes an Enumerable of requests, and returns and enumerable
# follows Ruby's #each iteration protocol. The enumerator blocks while # of responses.
# waiting for each response, stops when the server signals that no #
# further responses will be supplied. If the implicit block is provided, # == requests ==
# it is executed with each response as the argument and no result is #
# returned. # requests provides an 'iterable' of Requests. I.e. it follows Ruby's
# # #each enumeration protocol. In the simplest case, requests will be an
# == Flow Control == # array of marshallable objects; in typical case it will be an
# This is a blocking call. # Enumerable that allows dynamic construction of the marshallable
# # objects.
# * the request is sent only when GRPC core's flow control allows it to #
# be sent. # == responses ==
# #
# * the request will not complete until the server sends the final response # This is an enumerator of responses. I.e, its #next method blocks
# followed by a status message. # waiting for the next response. Also, if at any point the block needs
# # to consume all the remaining responses, this can be done using #each or
# == Errors == # #collect. Calling #each or #collect should only be done if
# An RuntimeError is raised if # the_call#writes_done has been called, otherwise the block will loop
# # forever.
# * the server responds with a non-OK status when any response is #
# * retrieved # == Flow Control ==
# # This is a blocking call.
# * the deadline is exceeded #
# # * the call completes when the next call to provided block returns
# == Return Value == # * [False]
# #
# if the return_op is false, the return value is an Enumerator of the # * the execution block parameters are two objects for sending and
# results, unless a block is provided, in which case the block is # receiving responses, each of which blocks waiting for flow control.
# executed with each response. # E.g, calles to bidi_call#remote_send will wait until flow control
# # allows another write before returning; and obviously calls to
# if return_op is true, the function returns an Operation whose #execute # responses#next block until the next response is available.
# method runs server streamer call. Again, Operation#execute either #
# calls the given block with each response or returns an Enumerator of the # == Termination ==
# responses. #
# # As well as sending and receiving messages, the block passed to the
# == Keyword Args == # function is also responsible for:
# #
# Unspecified keyword arguments are treated as metadata to be sent to the # * calling bidi_call#writes_done to indicate no further reqs will be
# server. # sent.
# #
# @param method [String] the RPC method to call on the GRPC server # * returning false if once the bidi stream is functionally completed.
# @param req [Object] the request sent to the server #
# @param marshal [Function] f(obj)->string that marshals requests # Note that response#next will indicate that there are no further
# @param unmarshal [Function] f(string)->obj that unmarshals responses # responses by throwing StopIteration, but can only happen either
# @param deadline [Numeric] the max completion time in seconds # if bidi_call#writes_done is called.
# @param return_op [true|false] (default false) return an Operation if true #
# @param blk [Block] when provided, is executed for each response # To terminate the RPC correctly the block:
# @return [Enumerator|Operation|nil] as discussed above #
def server_streamer(method, req, marshal, unmarshal, deadline=nil, # * must call bidi#writes_done and then
return_op:false, **kw, &blk) #
c = new_active_call(method, marshal, unmarshal, deadline || @deadline) # * either return false as soon as there is no need for other responses
md = @update_metadata.nil? ? kw : @update_metadata.call(kw.clone) #
return c.server_streamer(req, **md, &blk) unless return_op # * loop on responses#next until no further responses are available
#
# == Errors ==
# An RuntimeError is raised if
#
# * the server responds with a non-OK status when any response is
# * retrieved
#
# * the deadline is exceeded
#
#
# == Keyword Args ==
#
# Unspecified keyword arguments are treated as metadata to be sent to the
# server.
#
# == Return Value ==
#
# if the return_op is false, the return value is an Enumerator of the
# results, unless a block is provided, in which case the block is
# executed with each response.
#
# if return_op is true, the function returns an Operation whose #execute
# method runs the Bidi call. Again, Operation#execute either calls a
# given block with each response or returns an Enumerator of the
# responses.
#
# @param method [String] the RPC method to call on the GRPC server
# @param requests [Object] an Enumerable of requests to send
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Numeric] (optional) the max completion time in seconds
# @param blk [Block] when provided, is executed for each response
# @param return_op [true|false] return an Operation if true
# @return [Enumerator|nil|Operation] as discussed above
def bidi_streamer(method, requests, marshal, unmarshal, deadline = nil,
return_op: false, **kw, &blk)
c = new_active_call(method, marshal, unmarshal, deadline || @deadline)
md = @update_metadata.nil? ? kw : @update_metadata.call(kw.clone)
return c.bidi_streamer(requests, **md, &blk) unless return_op
# return the operation view of the active_call; define #execute # return the operation view of the active_call; define #execute
# as a new method for this instance that invokes #server_streamer # as a new method for this instance that invokes #bidi_streamer
op = c.operation op = c.operation
op.define_singleton_method(:execute) do op.define_singleton_method(:execute) do
c.server_streamer(req, **md, &blk) c.bidi_streamer(requests, **md, &blk)
end
op
end end
op
end
# bidi_streamer sends a stream of requests to the GRPC server, and yields private
# a stream of responses.
#
# This method takes an Enumerable of requests, and returns and enumerable
# of responses.
#
# == requests ==
#
# requests provides an 'iterable' of Requests. I.e. it follows Ruby's #each
# enumeration protocol. In the simplest case, requests will be an array of
# marshallable objects; in typical case it will be an Enumerable that
# allows dynamic construction of the marshallable objects.
#
# == responses ==
#
# This is an enumerator of responses. I.e, its #next method blocks
# waiting for the next response. Also, if at any point the block needs
# to consume all the remaining responses, this can be done using #each or
# #collect. Calling #each or #collect should only be done if
# the_call#writes_done has been called, otherwise the block will loop
# forever.
#
# == Flow Control ==
# This is a blocking call.
#
# * the call completes when the next call to provided block returns
# * [False]
#
# * the execution block parameters are two objects for sending and
# receiving responses, each of which blocks waiting for flow control.
# E.g, calles to bidi_call#remote_send will wait until flow control
# allows another write before returning; and obviously calls to
# responses#next block until the next response is available.
#
# == Termination ==
#
# As well as sending and receiving messages, the block passed to the
# function is also responsible for:
#
# * calling bidi_call#writes_done to indicate no further reqs will be
# sent.
#
# * returning false if once the bidi stream is functionally completed.
#
# Note that response#next will indicate that there are no further
# responses by throwing StopIteration, but can only happen either
# if bidi_call#writes_done is called.
#
# To terminate the RPC correctly the block:
#
# * must call bidi#writes_done and then
#
# * either return false as soon as there is no need for other responses
#
# * loop on responses#next until no further responses are available
#
# == Errors ==
# An RuntimeError is raised if
#
# * the server responds with a non-OK status when any response is
# * retrieved
#
# * the deadline is exceeded
#
#
# == Keyword Args ==
#
# Unspecified keyword arguments are treated as metadata to be sent to the
# server.
#
# == Return Value ==
#
# if the return_op is false, the return value is an Enumerator of the
# results, unless a block is provided, in which case the block is
# executed with each response.
#
# if return_op is true, the function returns an Operation whose #execute
# method runs the Bidi call. Again, Operation#execute either calls a
# given block with each response or returns an Enumerator of the responses.
#
# @param method [String] the RPC method to call on the GRPC server
# @param requests [Object] an Enumerable of requests to send
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [Numeric] (optional) the max completion time in seconds
# @param blk [Block] when provided, is executed for each response
# @param return_op [true|false] (default false) return an Operation if true
# @return [Enumerator|nil|Operation] as discussed above
def bidi_streamer(method, requests, marshal, unmarshal, deadline=nil,
return_op:false, **kw, &blk)
c = new_active_call(method, marshal, unmarshal, deadline || @deadline)
md = @update_metadata.nil? ? kw : @update_metadata.call(kw.clone)
return c.bidi_streamer(requests, **md, &blk) unless return_op
# return the operation view of the active_call; define #execute # Creates a new active stub
# as a new method for this instance that invokes #bidi_streamer #
op = c.operation # @param ch [GRPC::Channel] the channel used to create the stub.
op.define_singleton_method(:execute) do # @param marshal [Function] f(obj)->string that marshals requests
c.bidi_streamer(requests, **md, &blk) # @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [TimeConst]
def new_active_call(ch, marshal, unmarshal, deadline = nil)
absolute_deadline = Core::TimeConsts.from_relative_time(deadline)
call = @ch.create_call(ch, @host, absolute_deadline)
ActiveCall.new(call, @queue, marshal, unmarshal, absolute_deadline,
started: false)
end end
op
end end
private
# Creates a new active stub
#
# @param ch [GRPC::Channel] the channel used to create the stub.
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
# @param deadline [TimeConst]
def new_active_call(ch, marshal, unmarshal, deadline=nil)
absolute_deadline = Core::TimeConsts.from_relative_time(deadline)
call = @ch.create_call(ch, @host, absolute_deadline)
ActiveCall.new(call, @queue, marshal, unmarshal, absolute_deadline,
started:false)
end
end end
end end

@ -29,54 +29,51 @@
require 'grpc/grpc' require 'grpc/grpc'
module Google::RPC module Google
module RPC
# RpcDesc is a Descriptor of an RPC method. # RpcDesc is a Descriptor of an RPC method.
class RpcDesc < Struct.new(:name, :input, :output, :marshal_method, class RpcDesc < Struct.new(:name, :input, :output, :marshal_method,
:unmarshal_method) :unmarshal_method)
include Core::StatusCodes include Core::StatusCodes
# Used to wrap a message class to indicate that it needs to be streamed. # Used to wrap a message class to indicate that it needs to be streamed.
class Stream class Stream
attr_accessor :type attr_accessor :type
def initialize(type) def initialize(type)
@type = type @type = type
end
end end
end
# @return [Proc] { |instance| marshalled(instance) } # @return [Proc] { |instance| marshalled(instance) }
def marshal_proc def marshal_proc
Proc.new { |o| o.class.method(marshal_method).call(o).to_s } proc { |o| o.class.method(marshal_method).call(o).to_s }
end end
# @param [:input, :output] target determines whether to produce the an # @param [:input, :output] target determines whether to produce the an
# unmarshal Proc for the rpc input parameter or # unmarshal Proc for the rpc input parameter or
# its output parameter # its output parameter
# #
# @return [Proc] An unmarshal proc { |marshalled(instance)| instance } # @return [Proc] An unmarshal proc { |marshalled(instance)| instance }
def unmarshal_proc(target) def unmarshal_proc(target)
raise ArgumentError if not [:input, :output].include?(target) fail ArgumentError unless [:input, :output].include?(target)
unmarshal_class = method(target).call unmarshal_class = method(target).call
if unmarshal_class.is_a?Stream unmarshal_class = unmarshal_class.type if unmarshal_class.is_a? Stream
unmarshal_class = unmarshal_class.type proc { |o| unmarshal_class.method(unmarshal_method).call(o) }
end end
Proc.new { |o| unmarshal_class.method(unmarshal_method).call(o) }
end
def run_server_method(active_call, mth) def run_server_method(active_call, mth)
# While a server method is running, it might be cancelled, its deadline # While a server method is running, it might be cancelled, its deadline
# might be reached, the handler could throw an unknown error, or a # might be reached, the handler could throw an unknown error, or a
# well-behaved handler could throw a StatusError. # well-behaved handler could throw a StatusError.
begin if request_response?
if is_request_response?
req = active_call.remote_read req = active_call.remote_read
resp = mth.call(req, active_call.single_req_view) resp = mth.call(req, active_call.single_req_view)
active_call.remote_send(resp) active_call.remote_send(resp)
elsif is_client_streamer? elsif client_streamer?
resp = mth.call(active_call.multi_req_view) resp = mth.call(active_call.multi_req_view)
active_call.remote_send(resp) active_call.remote_send(resp)
elsif is_server_streamer? elsif server_streamer?
req = active_call.remote_read req = active_call.remote_read
replys = mth.call(req, active_call.single_req_view) replys = mth.call(req, active_call.single_req_view)
replys.each { |r| active_call.remote_send(r) } replys.each { |r| active_call.remote_send(r) }
@ -88,7 +85,7 @@ module Google::RPC
rescue BadStatus => e rescue BadStatus => e
# this is raised by handlers that want GRPC to send an application # this is raised by handlers that want GRPC to send an application
# error code and detail message. # error code and detail message.
logger.debug("app error: #{active_call}, status:#{e.code}:#{e.details}") logger.debug("app err: #{active_call}, status:#{e.code}:#{e.details}")
send_status(active_call, e.code, e.details) send_status(active_call, e.code, e.details)
rescue Core::CallError => e rescue Core::CallError => e
# This is raised by GRPC internals but should rarely, if ever happen. # This is raised by GRPC internals but should rarely, if ever happen.
@ -110,50 +107,46 @@ module Google::RPC
logger.warn(e) logger.warn(e)
send_status(active_call, UNKNOWN, 'no reason given') send_status(active_call, UNKNOWN, 'no reason given')
end end
end
def assert_arity_matches(mth) def assert_arity_matches(mth)
if (is_request_response? || is_server_streamer?) if request_response? || server_streamer?
if mth.arity != 2 if mth.arity != 2
raise arity_error(mth, 2, "should be #{mth.name}(req, call)") fail arity_error(mth, 2, "should be #{mth.name}(req, call)")
end end
else else
if mth.arity != 1 if mth.arity != 1
raise arity_error(mth, 1, "should be #{mth.name}(call)") fail arity_error(mth, 1, "should be #{mth.name}(call)")
end
end end
end end
end
def is_request_response? def request_response?
!input.is_a?(Stream) && !output.is_a?(Stream) !input.is_a?(Stream) && !output.is_a?(Stream)
end end
def is_client_streamer? def client_streamer?
input.is_a?(Stream) && !output.is_a?(Stream) input.is_a?(Stream) && !output.is_a?(Stream)
end end
def is_server_streamer? def server_streamer?
!input.is_a?(Stream) && output.is_a?(Stream) !input.is_a?(Stream) && output.is_a?(Stream)
end end
def is_bidi_streamer? def bidi_streamer?
input.is_a?(Stream) && output.is_a?(Stream) input.is_a?(Stream) && output.is_a?(Stream)
end end
def arity_error(mth, want, msg) def arity_error(mth, want, msg)
"##{mth.name}: bad arg count; got:#{mth.arity}, want:#{want}, #{msg}" "##{mth.name}: bad arg count; got:#{mth.arity}, want:#{want}, #{msg}"
end end
def send_status(active_client, code, details) def send_status(active_client, code, details)
begin
details = 'Not sure why' if details.nil? details = 'Not sure why' if details.nil?
active_client.send_status(code, details) active_client.send_status(code, details)
rescue StandardError => e rescue StandardError => e
logger.warn('Could not send status %d:%s' % [code, details]) logger.warn("Could not send status #{code}:#{details}")
logger.warn(e) logger.warn(e)
end end
end end
end end
end end

@ -33,382 +33,378 @@ require 'grpc/generic/service'
require 'thread' require 'thread'
require 'xray/thread_dump_signal_handler' require 'xray/thread_dump_signal_handler'
module Google::RPC module Google
# Google::RPC contains the General RPC module.
# RpcServer hosts a number of services and makes them available on the module RPC
# network. # RpcServer hosts a number of services and makes them available on the
class RpcServer # network.
include Core::CompletionType class RpcServer
include Core::TimeConsts include Core::CompletionType
extend ::Forwardable include Core::TimeConsts
extend ::Forwardable
def_delegators :@server, :add_http2_port
def_delegators :@server, :add_http2_port
# Default thread pool size is 3
DEFAULT_POOL_SIZE = 3 # Default thread pool size is 3
DEFAULT_POOL_SIZE = 3
# Default max_waiting_requests size is 20
DEFAULT_MAX_WAITING_REQUESTS = 20 # Default max_waiting_requests size is 20
DEFAULT_MAX_WAITING_REQUESTS = 20
# Creates a new RpcServer.
# # Creates a new RpcServer.
# The RPC server is configured using keyword arguments. #
# # The RPC server is configured using keyword arguments.
# There are some specific keyword args used to configure the RpcServer #
# instance, however other arbitrary are allowed and when present are used # There are some specific keyword args used to configure the RpcServer
# to configure the listeninng connection set up by the RpcServer. # instance, however other arbitrary are allowed and when present are used
# # to configure the listeninng connection set up by the RpcServer.
# * server_override: which if passed must be a [GRPC::Core::Server]. When #
# present. # * server_override: which if passed must be a [GRPC::Core::Server]. When
# # present.
# * poll_period: when present, the server polls for new events with this #
# period # * poll_period: when present, the server polls for new events with this
# # period
# * pool_size: the size of the thread pool the server uses to run its #
# threads # * pool_size: the size of the thread pool the server uses to run its
# # threads
# * completion_queue_override: when supplied, this will be used as the #
# completion_queue that the server uses to receive network events, # * completion_queue_override: when supplied, this will be used as the
# otherwise its creates a new instance itself # completion_queue that the server uses to receive network events,
# # otherwise its creates a new instance itself
# * creds: [GRPC::Core::ServerCredentials] #
# the credentials used to secure the server # * creds: [GRPC::Core::ServerCredentials]
# # the credentials used to secure the server
# * max_waiting_requests: the maximum number of requests that are not #
# being handled to allow. When this limit is exceeded, the server responds # * max_waiting_requests: the maximum number of requests that are not
# with not available to new requests # being handled to allow. When this limit is exceeded, the server responds
def initialize(pool_size:DEFAULT_POOL_SIZE, # with not available to new requests
max_waiting_requests:DEFAULT_MAX_WAITING_REQUESTS, def initialize(pool_size:DEFAULT_POOL_SIZE,
poll_period:INFINITE_FUTURE, max_waiting_requests:DEFAULT_MAX_WAITING_REQUESTS,
completion_queue_override:nil, poll_period:INFINITE_FUTURE,
creds:nil, completion_queue_override:nil,
server_override:nil, creds:nil,
**kw) server_override:nil,
if !completion_queue_override.nil? **kw)
cq = completion_queue_override if completion_queue_override.nil?
if !cq.is_a?(Core::CompletionQueue) cq = Core::CompletionQueue.new
raise ArgumentError.new('not a CompletionQueue') else
cq = completion_queue_override
unless cq.is_a? Core::CompletionQueue
fail(ArgumentError, 'not a CompletionQueue')
end
end end
else @cq = cq
cq = Core::CompletionQueue.new
end if server_override.nil?
@cq = cq if creds.nil?
srv = Core::Server.new(@cq, kw)
if !server_override.nil? elsif !creds.is_a? Core::ServerCredentials
srv = server_override fail(ArgumentError, 'not a ServerCredentials')
raise ArgumentError.new('not a Server') unless srv.is_a?(Core::Server) else
elsif creds.nil? srv = Core::Server.new(@cq, kw, creds)
srv = Core::Server.new(@cq, kw) end
elsif !creds.is_a?(Core::ServerCredentials) else
raise ArgumentError.new('not a ServerCredentials') srv = server_override
else fail(ArgumentError, 'not a Server') unless srv.is_a? Core::Server
srv = Core::Server.new(@cq, kw, creds) end
@server = srv
@pool_size = pool_size
@max_waiting_requests = max_waiting_requests
@poll_period = poll_period
@run_mutex = Mutex.new
@run_cond = ConditionVariable.new
@pool = Pool.new(@pool_size)
end end
@server = srv
@pool_size = pool_size
@max_waiting_requests = max_waiting_requests
@poll_period = poll_period
@run_mutex = Mutex.new
@run_cond = ConditionVariable.new
@pool = Pool.new(@pool_size)
end
# stops a running server # stops a running server
# #
# the call has no impact if the server is already stopped, otherwise # the call has no impact if the server is already stopped, otherwise
# server's current call loop is it's last. # server's current call loop is it's last.
def stop def stop
if @running return unless @running
@stopped = true @stopped = true
@pool.stop @pool.stop
end end
end
# determines if the server is currently running # determines if the server is currently running
def running? def running?
@running ||= false @running ||= false
end end
# Is called from other threads to wait for #run to start up the server. # Is called from other threads to wait for #run to start up the server.
# #
# If run has not been called, this returns immediately. # If run has not been called, this returns immediately.
# #
# @param timeout [Numeric] number of seconds to wait # @param timeout [Numeric] number of seconds to wait
# @result [true, false] true if the server is running, false otherwise # @result [true, false] true if the server is running, false otherwise
def wait_till_running(timeout=0.1) def wait_till_running(timeout = 0.1)
end_time, sleep_period = Time.now + timeout, (1.0 * timeout)/100 end_time, sleep_period = Time.now + timeout, (1.0 * timeout) / 100
while Time.now < end_time while Time.now < end_time
if !running? @run_mutex.synchronize { @run_cond.wait(@run_mutex) } unless running?
@run_mutex.synchronize { @run_cond.wait(@run_mutex) } sleep(sleep_period)
end end
sleep(sleep_period) running?
end end
return running?
end
# determines if the server is currently stopped
def stopped?
@stopped ||= false
end
# handle registration of classes
#
# service is either a class that includes GRPC::GenericService and whose
# #new function can be called without argument or any instance of such a
# class.
#
# E.g, after
#
# class Divider
# include GRPC::GenericService
# rpc :div DivArgs, DivReply # single request, single response
# def initialize(optional_arg='default option') # no args
# ...
# end
#
# srv = GRPC::RpcServer.new(...)
#
# # Either of these works
#
# srv.handle(Divider)
#
# # or
#
# srv.handle(Divider.new('replace optional arg'))
#
# It raises RuntimeError:
# - if service is not valid service class or object
# - if it is a valid service, but the handler methods are already registered
# - if the server is already running
#
# @param service [Object|Class] a service class or object as described
# above
def handle(service)
raise 'cannot add services if the server is running' if running?
raise 'cannot add services if the server is stopped' if stopped?
cls = service.is_a?(Class) ? service : service.class
assert_valid_service_class(cls)
add_rpc_descs_for(service)
end
# runs the server # determines if the server is currently stopped
# def stopped?
# - if no rpc_descs are registered, this exits immediately, otherwise it @stopped ||= false
# continues running permanently and does not return until program exit.
#
# - #running? returns true after this is called, until #stop cause the
# the server to stop.
def run
if rpc_descs.size == 0
logger.warn('did not run as no services were present')
return
end end
@run_mutex.synchronize do
@running = true # handle registration of classes
@run_cond.signal #
# service is either a class that includes GRPC::GenericService and whose
# #new function can be called without argument or any instance of such a
# class.
#
# E.g, after
#
# class Divider
# include GRPC::GenericService
# rpc :div DivArgs, DivReply # single request, single response
# def initialize(optional_arg='default option') # no args
# ...
# end
#
# srv = GRPC::RpcServer.new(...)
#
# # Either of these works
#
# srv.handle(Divider)
#
# # or
#
# srv.handle(Divider.new('replace optional arg'))
#
# It raises RuntimeError:
# - if service is not valid service class or object
# - its handler methods are already registered
# - if the server is already running
#
# @param service [Object|Class] a service class or object as described
# above
def handle(service)
fail 'cannot add services if the server is running' if running?
fail 'cannot add services if the server is stopped' if stopped?
cls = service.is_a?(Class) ? service : service.class
assert_valid_service_class(cls)
add_rpc_descs_for(service)
end end
@pool.start
@server.start # runs the server
server_tag = Object.new #
while !stopped? # - if no rpc_descs are registered, this exits immediately, otherwise it
@server.request_call(server_tag) # continues running permanently and does not return until program exit.
ev = @cq.pluck(server_tag, @poll_period) #
next if ev.nil? # - #running? returns true after this is called, until #stop cause the
if ev.type != SERVER_RPC_NEW # the server to stop.
logger.warn("bad evt: got:#{ev.type}, want:#{SERVER_RPC_NEW}") def run
ev.close if rpc_descs.size == 0
next logger.warn('did not run as no services were present')
return
end end
c = new_active_server_call(ev.call, ev.result) @run_mutex.synchronize do
if !c.nil? @running = true
mth = ev.result.method.to_sym @run_cond.signal
ev.close end
@pool.schedule(c) do |call| @pool.start
rpc_descs[mth].run_server_method(call, rpc_handlers[mth]) @server.start
server_tag = Object.new
until stopped?
@server.request_call(server_tag)
ev = @cq.pluck(server_tag, @poll_period)
next if ev.nil?
if ev.type != SERVER_RPC_NEW
logger.warn("bad evt: got:#{ev.type}, want:#{SERVER_RPC_NEW}")
ev.close
next
end
c = new_active_server_call(ev.call, ev.result)
unless c.nil?
mth = ev.result.method.to_sym
ev.close
@pool.schedule(c) do |call|
rpc_descs[mth].run_server_method(call, rpc_handlers[mth])
end
end end
end end
end @running = false
@running = false
end
def new_active_server_call(call, new_server_rpc)
# TODO(temiola): perhaps reuse the main server completion queue here, but
# for now, create a new completion queue per call, pending best practice
# usage advice from the c core.
# Accept the call. This is necessary even if a status is to be sent back
# immediately
finished_tag = Object.new
call_queue = Core::CompletionQueue.new
call.metadata = new_server_rpc.metadata # store the metadata on the call
call.server_accept(call_queue, finished_tag)
call.server_end_initial_metadata()
# Send UNAVAILABLE if there are too many unprocessed jobs
jobs_count, max = @pool.jobs_waiting, @max_waiting_requests
logger.info("waiting: #{jobs_count}, max: #{max}")
if @pool.jobs_waiting > @max_waiting_requests
logger.warn("NOT AVAILABLE: too many jobs_waiting: #{new_server_rpc}")
noop = Proc.new { |x| x }
c = ActiveCall.new(call, call_queue, noop, noop,
new_server_rpc.deadline, finished_tag: finished_tag)
c.send_status(StatusCodes::UNAVAILABLE, '')
return nil
end end
# Send NOT_FOUND if the method does not exist def new_active_server_call(call, new_server_rpc)
mth = new_server_rpc.method.to_sym # TODO(temiola): perhaps reuse the main server completion queue here,
if !rpc_descs.has_key?(mth) # but for now, create a new completion queue per call, pending best
logger.warn("NOT_FOUND: #{new_server_rpc}") # practice usage advice from the c core.
noop = Proc.new { |x| x }
c = ActiveCall.new(call, call_queue, noop, noop, # Accept the call. This is necessary even if a status is to be sent
new_server_rpc.deadline, finished_tag: finished_tag) # back immediately
c.send_status(StatusCodes::NOT_FOUND, '') finished_tag = Object.new
return nil call_queue = Core::CompletionQueue.new
end call.metadata = new_server_rpc.metadata # store the metadata
call.server_accept(call_queue, finished_tag)
call.server_end_initial_metadata
# Send UNAVAILABLE if there are too many unprocessed jobs
jobs_count, max = @pool.jobs_waiting, @max_waiting_requests
logger.info("waiting: #{jobs_count}, max: #{max}")
if @pool.jobs_waiting > @max_waiting_requests
logger.warn("NOT AVAILABLE: too many jobs_waiting: #{new_server_rpc}")
noop = proc { |x| x }
c = ActiveCall.new(call, call_queue, noop, noop,
new_server_rpc.deadline,
finished_tag: finished_tag)
c.send_status(StatusCodes::UNAVAILABLE, '')
return nil
end
# Create the ActiveCall # Send NOT_FOUND if the method does not exist
rpc_desc = rpc_descs[mth] mth = new_server_rpc.method.to_sym
logger.info("deadline is #{new_server_rpc.deadline}; (now=#{Time.now})") unless rpc_descs.key?(mth)
ActiveCall.new(call, call_queue, logger.warn("NOT_FOUND: #{new_server_rpc}")
rpc_desc.marshal_proc, rpc_desc.unmarshal_proc(:input), noop = proc { |x| x }
new_server_rpc.deadline, finished_tag: finished_tag) c = ActiveCall.new(call, call_queue, noop, noop,
end new_server_rpc.deadline,
finished_tag: finished_tag)
c.send_status(StatusCodes::NOT_FOUND, '')
return nil
end
# Pool is a simple thread pool for running server requests. # Create the ActiveCall
class Pool rpc_desc = rpc_descs[mth]
logger.info("deadline is #{new_server_rpc.deadline}; (now=#{Time.now})")
def initialize(size) ActiveCall.new(call, call_queue,
raise 'pool size must be positive' unless size > 0 rpc_desc.marshal_proc, rpc_desc.unmarshal_proc(:input),
@jobs = Queue.new new_server_rpc.deadline, finished_tag: finished_tag)
@size = size
@stopped = false
@stop_mutex = Mutex.new
@stop_cond = ConditionVariable.new
@workers = []
end end
# Returns the number of jobs waiting # Pool is a simple thread pool for running server requests.
def jobs_waiting class Pool
@jobs.size def initialize(size)
end fail 'pool size must be positive' unless size > 0
@jobs = Queue.new
@size = size
@stopped = false
@stop_mutex = Mutex.new
@stop_cond = ConditionVariable.new
@workers = []
end
# Runs the given block on the queue with the provided args. # Returns the number of jobs waiting
# def jobs_waiting
# @param args the args passed blk when it is called @jobs.size
# @param blk the block to call end
def schedule(*args, &blk)
raise 'already stopped' if @stopped # Runs the given block on the queue with the provided args.
return if blk.nil? #
logger.info('schedule another job') # @param args the args passed blk when it is called
@jobs << [blk, args] # @param blk the block to call
end def schedule(*args, &blk)
fail 'already stopped' if @stopped
return if blk.nil?
logger.info('schedule another job')
@jobs << [blk, args]
end
# Starts running the jobs in the thread pool. # Starts running the jobs in the thread pool.
def start def start
raise 'already stopped' if @stopped fail 'already stopped' if @stopped
until @workers.size == @size.to_i until @workers.size == @size.to_i
next_thread = Thread.new do next_thread = Thread.new do
catch(:exit) do # allows { throw :exit } to kill a thread catch(:exit) do # allows { throw :exit } to kill a thread
loop do loop do
begin begin
blk, args = @jobs.pop blk, args = @jobs.pop
blk.call(*args) blk.call(*args)
rescue StandardError => e rescue StandardError => e
logger.warn('Error in worker thread') logger.warn('Error in worker thread')
logger.warn(e) logger.warn(e)
end
end end
end end
end
# removes the threads from workers, and signal when all the threads # removes the threads from workers, and signal when all the
# are complete. # threads are complete.
@stop_mutex.synchronize do @stop_mutex.synchronize do
@workers.delete(Thread.current) @workers.delete(Thread.current)
if @workers.size == 0 @stop_cond.signal if @workers.size == 0
@stop_cond.signal
end end
end end
@workers << next_thread
end end
@workers << next_thread
end end
end
# Stops the jobs in the pool # Stops the jobs in the pool
def stop def stop
logger.info('stopping, will wait for all the workers to exit') logger.info('stopping, will wait for all the workers to exit')
@workers.size.times { schedule { throw :exit } } @workers.size.times { schedule { throw :exit } }
@stopped = true @stopped = true
# TODO(temiola): allow configuration of the keepalive period # TODO(temiola): allow configuration of the keepalive period
keep_alive = 5 keep_alive = 5
@stop_mutex.synchronize do @stop_mutex.synchronize do
if @workers.size > 0 @stop_cond.wait(@stop_mutex, keep_alive) if @workers.size > 0
@stop_cond.wait(@stop_mutex, keep_alive)
end end
end
# Forcibly shutdown any threads that are still alive. # Forcibly shutdown any threads that are still alive.
if @workers.size > 0 if @workers.size > 0
logger.warn("forcibly terminating #{@workers.size} worker(s)") logger.warn("forcibly terminating #{@workers.size} worker(s)")
@workers.each do |t| @workers.each do |t|
next unless t.alive? next unless t.alive?
begin begin
t.exit t.exit
rescue StandardError => e rescue StandardError => e
logger.warn('error while terminating a worker') logger.warn('error while terminating a worker')
logger.warn(e) logger.warn(e)
end
end end
end end
end
logger.info('stopped, all workers are shutdown') logger.info('stopped, all workers are shutdown')
end
end end
end protected
protected def rpc_descs
@rpc_descs ||= {}
def rpc_descs end
@rpc_descs ||= {}
end
def rpc_handlers def rpc_handlers
@rpc_handlers ||= {} @rpc_handlers ||= {}
end end
private private
def assert_valid_service_class(cls) def assert_valid_service_class(cls)
if !cls.include?(GenericService) unless cls.include?(GenericService)
raise "#{cls} should 'include GenericService'" fail "#{cls} should 'include GenericService'"
end end
if cls.rpc_descs.size == 0 if cls.rpc_descs.size == 0
raise "#{cls} should specify some rpc descriptions" fail "#{cls} should specify some rpc descriptions"
end
cls.assert_rpc_descs_have_methods
end end
cls.assert_rpc_descs_have_methods
end
def add_rpc_descs_for(service) def add_rpc_descs_for(service)
cls = service.is_a?(Class) ? service : service.class cls = service.is_a?(Class) ? service : service.class
specs = rpc_descs specs = rpc_descs
handlers = rpc_handlers handlers = rpc_handlers
cls.rpc_descs.each_pair do |name,spec| cls.rpc_descs.each_pair do |name, spec|
route = "/#{cls.service_name}/#{name}".to_sym route = "/#{cls.service_name}/#{name}".to_sym
if specs.has_key?(route) if specs.key? route
raise "Cannot add rpc #{route} from #{spec}, already registered" fail "Cannot add rpc #{route} from #{spec}, already registered"
else
specs[route] = spec
if service.is_a?(Class)
handlers[route] = cls.new.method(name.to_s.underscore.to_sym)
else else
handlers[route] = service.method(name.to_s.underscore.to_sym) specs[route] = spec
if service.is_a?(Class)
handlers[route] = cls.new.method(name.to_s.underscore.to_sym)
else
handlers[route] = service.method(name.to_s.underscore.to_sym)
end
logger.info("handling #{route} with #{handlers[route]}")
end end
logger.info("handling #{route} with #{handlers[route]}")
end end
end end
end end
end end
end end

@ -32,7 +32,6 @@ require 'grpc/generic/rpc_desc'
# Extend String to add a method underscore # Extend String to add a method underscore
class String class String
# creates a new string that is the underscore separate version of this one. # creates a new string that is the underscore separate version of this one.
# #
# E.g, # E.g,
@ -40,210 +39,199 @@ class String
# AMethod -> a_method # AMethod -> a_method
# AnRpc -> an_rpc # AnRpc -> an_rpc
def underscore def underscore
word = self.dup word = dup
word.gsub!(/([A-Z]+)([A-Z][a-z])/, '\1_\2') word.gsub!(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
word.gsub!(/([a-z\d])([A-Z])/, '\1_\2') word.gsub!(/([a-z\d])([A-Z])/, '\1_\2')
word.tr!('-', '_') word.tr!('-', '_')
word.downcase! word.downcase!
word word
end end
end end
module Google::RPC module Google
# Google::RPC contains the General RPC module.
# Provides behaviour used to implement schema-derived service classes. module RPC
# # Provides behaviour used to implement schema-derived service classes.
# Is intended to be used to support both client and server IDL-schema-derived
# servers.
module GenericService
# Used to indicate that a name has already been specified
class DuplicateRpcName < StandardError
def initialize(name)
super("rpc (#{name}) is already defined")
end
end
# Provides a simple DSL to describe RPC services.
#
# E.g, a Maths service that uses the serializable messages DivArgs,
# DivReply and Num might define its endpoint uses the following way:
#
# rpc :div DivArgs, DivReply # single request, single response
# rpc :sum stream(Num), Num # streamed input, single response
# rpc :fib FibArgs, stream(Num) # single request, streamed response
# rpc :div_many stream(DivArgs), stream(DivReply)
# # streamed req and resp
# #
# Each 'rpc' adds an RpcDesc to classes including this module, and # Is intended to be used to support both client and server
# #assert_rpc_descs_have_methods is used to ensure the including class # IDL-schema-derived servers.
# provides methods with signatures that support all the descriptors. module GenericService
module Dsl # Used to indicate that a name has already been specified
class DuplicateRpcName < StandardError
# This configures the method names that the serializable message def initialize(name)
# implementation uses to marshal and unmarshal messages. super("rpc (#{name}) is already defined")
# end
# - unmarshal_class method must be a class method on the serializable end
# message type that takes a string (byte stream) and produces and object
#
# - marshal_class_method is called on a serializable message instance
# and produces a serialized string.
#
# The Dsl verifies that the types in the descriptor have both the
# unmarshal and marshal methods.
attr_writer(:marshal_class_method, :unmarshal_class_method)
# This allows configuration of the service name.
attr_accessor(:service_name)
# Adds an RPC spec. # Provides a simple DSL to describe RPC services.
# #
# Takes the RPC name and the classes representing the types to be # E.g, a Maths service that uses the serializable messages DivArgs,
# serialized, and adds them to the including classes rpc_desc hash. # DivReply and Num might define its endpoint uses the following way:
# #
# input and output should both have the methods #marshal and #unmarshal # rpc :div DivArgs, DivReply # single request, single response
# that are responsible for writing and reading an object instance from a # rpc :sum stream(Num), Num # streamed input, single response
# byte buffer respectively. # rpc :fib FibArgs, stream(Num) # single request, streamed response
# rpc :div_many stream(DivArgs), stream(DivReply)
# # streamed req and resp
# #
# @param name [String] the name of the rpc # Each 'rpc' adds an RpcDesc to classes including this module, and
# @param input [Object] the input parameter's class # #assert_rpc_descs_have_methods is used to ensure the including class
# @param output [Object] the output parameter's class # provides methods with signatures that support all the descriptors.
def rpc(name, input, output) module Dsl
raise DuplicateRpcName, name if rpc_descs.has_key?(name) # This configures the method names that the serializable message
assert_can_marshal(input) # implementation uses to marshal and unmarshal messages.
assert_can_marshal(output) #
rpc_descs[name] = RpcDesc.new(name, input, output, # - unmarshal_class method must be a class method on the serializable
marshal_class_method, # message type that takes a string (byte stream) and produces and object
unmarshal_class_method) #
end # - marshal_class_method is called on a serializable message instance
# and produces a serialized string.
def inherited(subclass) #
# Each subclass should have a distinct class variable with its own # The Dsl verifies that the types in the descriptor have both the
# rpc_descs # unmarshal and marshal methods.
subclass.rpc_descs.merge!(rpc_descs) attr_writer(:marshal_class_method, :unmarshal_class_method)
subclass.service_name = service_name
end # This allows configuration of the service name.
attr_accessor(:service_name)
# the name of the instance method used to marshal events to a byte stream.
def marshal_class_method # Adds an RPC spec.
@marshal_class_method ||= :marshal #
end # Takes the RPC name and the classes representing the types to be
# serialized, and adds them to the including classes rpc_desc hash.
#
# input and output should both have the methods #marshal and #unmarshal
# that are responsible for writing and reading an object instance from a
# byte buffer respectively.
#
# @param name [String] the name of the rpc
# @param input [Object] the input parameter's class
# @param output [Object] the output parameter's class
def rpc(name, input, output)
fail(DuplicateRpcName, name) if rpc_descs.key? name
assert_can_marshal(input)
assert_can_marshal(output)
rpc_descs[name] = RpcDesc.new(name, input, output,
marshal_class_method,
unmarshal_class_method)
end
# the name of the class method used to unmarshal from a byte stream. def inherited(subclass)
def unmarshal_class_method # Each subclass should have a distinct class variable with its own
@unmarshal_class_method ||= :unmarshal # rpc_descs
end subclass.rpc_descs.merge!(rpc_descs)
subclass.service_name = service_name
end
def assert_can_marshal(cls) # the name of the instance method used to marshal events to a byte
if cls.is_a?RpcDesc::Stream # stream.
cls = cls.type def marshal_class_method
@marshal_class_method ||= :marshal
end end
mth = unmarshal_class_method # the name of the class method used to unmarshal from a byte stream.
if !cls.methods.include?(mth) def unmarshal_class_method
raise ArgumentError, "#{cls} needs #{cls}.#{mth}" @unmarshal_class_method ||= :unmarshal
end end
mth = marshal_class_method def assert_can_marshal(cls)
if !cls.methods.include?(mth) cls = cls.type if cls.is_a? RpcDesc::Stream
raise ArgumentError, "#{cls} needs #{cls}.#{mth}" mth = unmarshal_class_method
unless cls.methods.include? mth
fail(ArgumentError, "#{cls} needs #{cls}.#{mth}")
end
mth = marshal_class_method
return if cls.methods.include? mth
fail(ArgumentError, "#{cls} needs #{cls}.#{mth}")
end end
end
# @param cls [Class] the class of a serializable type # @param cls [Class] the class of a serializable type
# @return cls wrapped in a RpcDesc::Stream # @return cls wrapped in a RpcDesc::Stream
def stream(cls) def stream(cls)
assert_can_marshal(cls) assert_can_marshal(cls)
RpcDesc::Stream.new(cls) RpcDesc::Stream.new(cls)
end end
# the RpcDescs defined for this GenericService, keyed by name. # the RpcDescs defined for this GenericService, keyed by name.
def rpc_descs def rpc_descs
@rpc_descs ||= {} @rpc_descs ||= {}
end end
# Creates a rpc client class with methods for accessing the methods # Creates a rpc client class with methods for accessing the methods
# currently in rpc_descs. # currently in rpc_descs.
def rpc_stub_class def rpc_stub_class
descs = rpc_descs descs = rpc_descs
route_prefix = service_name route_prefix = service_name
Class.new(ClientStub) do Class.new(ClientStub) do
# @param host [String] the host the stub connects to
# @param host [String] the host the stub connects to # @param kw [KeywordArgs] the channel arguments, plus any optional
# @param kw [KeywordArgs] the channel arguments, plus any optional # args for configuring the client's channel
# args for configuring the client's channel def initialize(host, **kw)
def initialize(host, **kw) super(host, Core::CompletionQueue.new, **kw)
super(host, Core::CompletionQueue.new, **kw) end
end
# Used define_method to add a method for each rpc_desc. Each method # Used define_method to add a method for each rpc_desc. Each method
# calls the base class method for the given descriptor. # calls the base class method for the given descriptor.
descs.each_pair do |name,desc| descs.each_pair do |name, desc|
mth_name = name.to_s.underscore.to_sym mth_name = name.to_s.underscore.to_sym
marshal = desc.marshal_proc marshal = desc.marshal_proc
unmarshal = desc.unmarshal_proc(:output) unmarshal = desc.unmarshal_proc(:output)
route = "/#{route_prefix}/#{name}" route = "/#{route_prefix}/#{name}"
if desc.is_request_response? if desc.request_response?
define_method(mth_name) do |req,deadline=nil| define_method(mth_name) do |req, deadline = nil|
logger.debug("calling #{@host}:#{route}") logger.debug("calling #{@host}:#{route}")
request_response(route, req, marshal, unmarshal, deadline) request_response(route, req, marshal, unmarshal, deadline)
end end
elsif desc.is_client_streamer? elsif desc.client_streamer?
define_method(mth_name) do |reqs,deadline=nil| define_method(mth_name) do |reqs, deadline = nil|
logger.debug("calling #{@host}:#{route}") logger.debug("calling #{@host}:#{route}")
client_streamer(route, reqs, marshal, unmarshal, deadline) client_streamer(route, reqs, marshal, unmarshal, deadline)
end end
elsif desc.is_server_streamer? elsif desc.server_streamer?
define_method(mth_name) do |req,deadline=nil,&blk| define_method(mth_name) do |req, deadline = nil, &blk|
logger.debug("calling #{@host}:#{route}") logger.debug("calling #{@host}:#{route}")
server_streamer(route, req, marshal, unmarshal, deadline, &blk) server_streamer(route, req, marshal, unmarshal, deadline,
end &blk)
else # is a bidi_stream end
define_method(mth_name) do |reqs, deadline=nil,&blk| else # is a bidi_stream
logger.debug("calling #{@host}:#{route}") define_method(mth_name) do |reqs, deadline = nil, &blk|
bidi_streamer(route, reqs, marshal, unmarshal, deadline, &blk) logger.debug("calling #{@host}:#{route}")
bidi_streamer(route, reqs, marshal, unmarshal, deadline, &blk)
end
end end
end end
end end
end end
end # Asserts that the appropriate methods are defined for each added rpc
# spec. Is intended to aid verifying that server classes are correctly
# Asserts that the appropriate methods are defined for each added rpc # implemented.
# spec. Is intended to aid verifying that server classes are correctly def assert_rpc_descs_have_methods
# implemented. rpc_descs.each_pair do |m, spec|
def assert_rpc_descs_have_methods mth_name = m.to_s.underscore.to_sym
rpc_descs.each_pair do |m,spec| unless instance_methods.include?(mth_name)
mth_name = m.to_s.underscore.to_sym fail "#{self} does not provide instance method '#{mth_name}'"
if !self.instance_methods.include?(mth_name) end
raise "#{self} does not provide instance method '#{mth_name}'" spec.assert_arity_matches(instance_method(mth_name))
end end
spec.assert_arity_matches(self.instance_method(mth_name))
end end
end end
end def self.included(o)
o.extend(Dsl)
def self.included(o) # Update to the use the service name including module. Proivde a default
o.extend(Dsl) # that can be nil e,g. when modules are declared dynamically.
return unless o.service_name.nil?
# Update to the use the service name including module. Proivde a default if o.name.nil?
# that can be nil e,g. when modules are declared dynamically. o.service_name = 'GenericService'
return unless o.service_name.nil?
if o.name.nil?
o.service_name = 'GenericService'
else
modules = o.name.split('::')
if modules.length > 2
o.service_name = modules[modules.length - 2]
else else
o.service_name = modules.first modules = o.name.split('::')
if modules.length > 2
o.service_name = modules[modules.length - 2]
else
o.service_name = modules.first
end
end end
end end
end end
end end
end end

@ -28,6 +28,7 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module Google module Google
# Google::RPC contains the General RPC module.
module RPC module RPC
VERSION = '0.0.1' VERSION = '0.0.1'
end end

@ -30,7 +30,6 @@
require 'grpc' require 'grpc'
describe 'Wrapped classes where .new cannot create an instance' do describe 'Wrapped classes where .new cannot create an instance' do
describe GRPC::Core::Event do describe GRPC::Core::Event do
it 'should fail .new fail with a runtime error' do it 'should fail .new fail with a runtime error' do
expect { GRPC::Core::Event.new }.to raise_error(TypeError) expect { GRPC::Core::Event.new }.to raise_error(TypeError)
@ -42,5 +41,4 @@ describe 'Wrapped classes where .new cannot create an instance' do
expect { GRPC::Core::Event.new }.to raise_error(TypeError) expect { GRPC::Core::Event.new }.to raise_error(TypeError)
end end
end end
end end

@ -30,9 +30,7 @@
require 'grpc' require 'grpc'
describe GRPC::Core::ByteBuffer do describe GRPC::Core::ByteBuffer do
describe '#new' do describe '#new' do
it 'is constructed from a string' do it 'is constructed from a string' do
expect { GRPC::Core::ByteBuffer.new('#new') }.not_to raise_error expect { GRPC::Core::ByteBuffer.new('#new') }.not_to raise_error
end end
@ -50,7 +48,6 @@ describe GRPC::Core::ByteBuffer do
expect { GRPC::Core::ByteBuffer.new(x) }.to raise_error TypeError expect { GRPC::Core::ByteBuffer.new(x) }.to raise_error TypeError
end end
end end
end end
describe '#to_s' do describe '#to_s' do
@ -67,5 +64,4 @@ describe GRPC::Core::ByteBuffer do
expect(a_copy.dup.to_s).to eq('#dup') expect(a_copy.dup.to_s).to eq('#dup')
end end
end end
end end

@ -33,30 +33,29 @@ require 'port_picker'
include GRPC::Core::StatusCodes include GRPC::Core::StatusCodes
describe GRPC::Core::RpcErrors do describe GRPC::Core::RpcErrors do
before(:each) do before(:each) do
@known_types = { @known_types = {
:OK => 0, OK: 0,
:ERROR => 1, ERROR: 1,
:NOT_ON_SERVER => 2, NOT_ON_SERVER: 2,
:NOT_ON_CLIENT => 3, NOT_ON_CLIENT: 3,
:ALREADY_ACCEPTED => 4, ALREADY_ACCEPTED: 4,
:ALREADY_INVOKED => 5, ALREADY_INVOKED: 5,
:NOT_INVOKED => 6, NOT_INVOKED: 6,
:ALREADY_FINISHED => 7, ALREADY_FINISHED: 7,
:TOO_MANY_OPERATIONS => 8, TOO_MANY_OPERATIONS: 8,
:INVALID_FLAGS => 9, INVALID_FLAGS: 9,
:ErrorMessages => { ErrorMessages: {
0=>'ok', 0 => 'ok',
1=>'unknown error', 1 => 'unknown error',
2=>'not available on a server', 2 => 'not available on a server',
3=>'not available on a client', 3 => 'not available on a client',
4=>'call is already accepted', 4 => 'call is already accepted',
5=>'call is already invoked', 5 => 'call is already invoked',
6=>'call is not yet invoked', 6 => 'call is not yet invoked',
7=>'call is already finished', 7 => 'call is already finished',
8=>'outstanding read or write present', 8 => 'outstanding read or write present',
9=>'a bad flag was given', 9 => 'a bad flag was given'
} }
} }
end end
@ -66,11 +65,9 @@ describe GRPC::Core::RpcErrors do
syms_and_codes = m.constants.collect { |c| [c, m.const_get(c)] } syms_and_codes = m.constants.collect { |c| [c, m.const_get(c)] }
expect(Hash[syms_and_codes]).to eq(@known_types) expect(Hash[syms_and_codes]).to eq(@known_types)
end end
end end
describe GRPC::Core::Call do describe GRPC::Core::Call do
before(:each) do before(:each) do
@tag = Object.new @tag = Object.new
@client_queue = GRPC::Core::CompletionQueue.new @client_queue = GRPC::Core::CompletionQueue.new
@ -88,7 +85,7 @@ describe GRPC::Core::Call do
describe '#start_read' do describe '#start_read' do
it 'should fail if called immediately' do it 'should fail if called immediately' do
blk = Proc.new { make_test_call.start_read(@tag) } blk = proc { make_test_call.start_read(@tag) }
expect(&blk).to raise_error GRPC::Core::CallError expect(&blk).to raise_error GRPC::Core::CallError
end end
end end
@ -96,21 +93,21 @@ describe GRPC::Core::Call do
describe '#start_write' do describe '#start_write' do
it 'should fail if called immediately' do it 'should fail if called immediately' do
bytes = GRPC::Core::ByteBuffer.new('test string') bytes = GRPC::Core::ByteBuffer.new('test string')
blk = Proc.new { make_test_call.start_write(bytes, @tag) } blk = proc { make_test_call.start_write(bytes, @tag) }
expect(&blk).to raise_error GRPC::Core::CallError expect(&blk).to raise_error GRPC::Core::CallError
end end
end end
describe '#start_write_status' do describe '#start_write_status' do
it 'should fail if called immediately' do it 'should fail if called immediately' do
blk = Proc.new { make_test_call.start_write_status(153, 'x', @tag) } blk = proc { make_test_call.start_write_status(153, 'x', @tag) }
expect(&blk).to raise_error GRPC::Core::CallError expect(&blk).to raise_error GRPC::Core::CallError
end end
end end
describe '#writes_done' do describe '#writes_done' do
it 'should fail if called immediately' do it 'should fail if called immediately' do
blk = Proc.new { make_test_call.writes_done(Object.new) } blk = proc { make_test_call.writes_done(Object.new) }
expect(&blk).to raise_error GRPC::Core::CallError expect(&blk).to raise_error GRPC::Core::CallError
end end
end end
@ -119,7 +116,8 @@ describe GRPC::Core::Call do
it 'adds metadata to a call without fail' do it 'adds metadata to a call without fail' do
call = make_test_call call = make_test_call
n = 37 n = 37
metadata = Hash[n.times.collect { |i| ["key%d" % i, "value%d" %i] } ] one_md = proc { |x| [sprintf('key%d', x), sprintf('value%d', x)] }
metadata = Hash[n.times.collect { |i| one_md.call i }]
expect { call.add_metadata(metadata) }.to_not raise_error expect { call.add_metadata(metadata) }.to_not raise_error
end end
end end
@ -174,7 +172,7 @@ describe GRPC::Core::Call do
describe '#metadata' do describe '#metadata' do
it 'can save the metadata hash and read it back' do it 'can save the metadata hash and read it back' do
call = make_test_call call = make_test_call
md = {'k1' => 'v1', 'k2' => 'v2'} md = { 'k1' => 'v1', 'k2' => 'v2' }
expect { call.metadata = md }.not_to raise_error expect { call.metadata = md }.not_to raise_error
expect(call.metadata).to be(md) expect(call.metadata).to be(md)
end end
@ -191,7 +189,6 @@ describe GRPC::Core::Call do
end end
end end
def make_test_call def make_test_call
@ch.create_call('dummy_method', 'dummy_host', deadline) @ch.create_call('dummy_method', 'dummy_host', deadline)
end end
@ -199,5 +196,4 @@ describe GRPC::Core::Call do
def deadline def deadline
Time.now + 2 # in 2 seconds; arbitrary Time.now + 2 # in 2 seconds; arbitrary
end end
end end

@ -37,8 +37,6 @@ def load_test_certs
end end
describe GRPC::Core::Channel do describe GRPC::Core::Channel do
def create_test_cert def create_test_cert
GRPC::Core::Credentials.new(load_test_certs[0]) GRPC::Core::Credentials.new(load_test_certs[0])
end end
@ -48,7 +46,6 @@ describe GRPC::Core::Channel do
end end
shared_examples '#new' do shared_examples '#new' do
it 'take a host name without channel args' do it 'take a host name without channel args' do
expect { GRPC::Core::Channel.new('dummy_host', nil) }.not_to raise_error expect { GRPC::Core::Channel.new('dummy_host', nil) }.not_to raise_error
end end
@ -61,14 +58,14 @@ describe GRPC::Core::Channel do
end end
it 'does not take a hash with bad values as channel args' do it 'does not take a hash with bad values as channel args' do
blk = construct_with_args(:symbol => Object.new) blk = construct_with_args(symbol: Object.new)
expect(&blk).to raise_error TypeError expect(&blk).to raise_error TypeError
blk = construct_with_args('1' => Hash.new) blk = construct_with_args('1' => Hash.new)
expect(&blk).to raise_error TypeError expect(&blk).to raise_error TypeError
end end
it 'can take a hash with a symbol key as channel args' do it 'can take a hash with a symbol key as channel args' do
blk = construct_with_args(:a_symbol => 1) blk = construct_with_args(a_symbol: 1)
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
@ -78,32 +75,30 @@ describe GRPC::Core::Channel do
end end
it 'can take a hash with a string value as channel args' do it 'can take a hash with a string value as channel args' do
blk = construct_with_args(:a_symbol => '1') blk = construct_with_args(a_symbol: '1')
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
it 'can take a hash with a symbol value as channel args' do it 'can take a hash with a symbol value as channel args' do
blk = construct_with_args(:a_symbol => :another_symbol) blk = construct_with_args(a_symbol: :another_symbol)
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
it 'can take a hash with a numeric value as channel args' do it 'can take a hash with a numeric value as channel args' do
blk = construct_with_args(:a_symbol => 1) blk = construct_with_args(a_symbol: 1)
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
it 'can take a hash with many args as channel args' do it 'can take a hash with many args as channel args' do
args = Hash[127.times.collect { |x| [x.to_s, x] } ] args = Hash[127.times.collect { |x| [x.to_s, x] }]
blk = construct_with_args(args) blk = construct_with_args(args)
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
end end
describe '#new for secure channels' do describe '#new for secure channels' do
def construct_with_args(a) def construct_with_args(a)
Proc.new { GRPC::Core::Channel.new('dummy_host', a, create_test_cert) } proc { GRPC::Core::Channel.new('dummy_host', a, create_test_cert) }
end end
it_behaves_like '#new' it_behaves_like '#new'
@ -113,7 +108,7 @@ describe GRPC::Core::Channel do
it_behaves_like '#new' it_behaves_like '#new'
def construct_with_args(a) def construct_with_args(a)
Proc.new { GRPC::Core::Channel.new('dummy_host', a) } proc { GRPC::Core::Channel.new('dummy_host', a) }
end end
end end
@ -125,7 +120,7 @@ describe GRPC::Core::Channel do
deadline = Time.now + 5 deadline = Time.now + 5
blk = Proc.new do blk = proc do
ch.create_call('dummy_method', 'dummy_host', deadline) ch.create_call('dummy_method', 'dummy_host', deadline)
end end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
@ -138,12 +133,11 @@ describe GRPC::Core::Channel do
ch.close ch.close
deadline = Time.now + 5 deadline = Time.now + 5
blk = Proc.new do blk = proc do
ch.create_call('dummy_method', 'dummy_host', deadline) ch.create_call('dummy_method', 'dummy_host', deadline)
end end
expect(&blk).to raise_error(RuntimeError) expect(&blk).to raise_error(RuntimeError)
end end
end end
describe '#destroy' do describe '#destroy' do
@ -151,7 +145,7 @@ describe GRPC::Core::Channel do
port = find_unused_tcp_port port = find_unused_tcp_port
host = "localhost:#{port}" host = "localhost:#{port}"
ch = GRPC::Core::Channel.new(host, nil) ch = GRPC::Core::Channel.new(host, nil)
blk = Proc.new { ch.destroy } blk = proc { ch.destroy }
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
@ -159,18 +153,16 @@ describe GRPC::Core::Channel do
port = find_unused_tcp_port port = find_unused_tcp_port
host = "localhost:#{port}" host = "localhost:#{port}"
ch = GRPC::Core::Channel.new(host, nil) ch = GRPC::Core::Channel.new(host, nil)
blk = Proc.new { ch.destroy } blk = proc { ch.destroy }
blk.call blk.call
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
end end
describe '::SSL_TARGET' do describe '::SSL_TARGET' do
it 'is a symbol' do it 'is a symbol' do
expect(GRPC::Core::Channel::SSL_TARGET).to be_a(Symbol) expect(GRPC::Core::Channel::SSL_TARGET).to be_a(Symbol)
end end
end end
describe '#close' do describe '#close' do
@ -178,7 +170,7 @@ describe GRPC::Core::Channel do
port = find_unused_tcp_port port = find_unused_tcp_port
host = "localhost:#{port}" host = "localhost:#{port}"
ch = GRPC::Core::Channel.new(host, nil) ch = GRPC::Core::Channel.new(host, nil)
blk = Proc.new { ch.close } blk = proc { ch.close }
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
@ -186,10 +178,9 @@ describe GRPC::Core::Channel do
port = find_unused_tcp_port port = find_unused_tcp_port
host = "localhost:#{port}" host = "localhost:#{port}"
ch = GRPC::Core::Channel.new(host, nil) ch = GRPC::Core::Channel.new(host, nil)
blk = Proc.new { ch.close } blk = proc { ch.close }
blk.call blk.call
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
end end
end end

@ -41,7 +41,6 @@ def load_test_certs
end end
shared_context 'setup: tags' do shared_context 'setup: tags' do
before(:example) do before(:example) do
@server_finished_tag = Object.new @server_finished_tag = Object.new
@client_finished_tag = Object.new @client_finished_tag = Object.new
@ -71,7 +70,7 @@ shared_context 'setup: tags' do
expect(ev).not_to be_nil expect(ev).not_to be_nil
expect(ev.type).to be(SERVER_RPC_NEW) expect(ev.type).to be(SERVER_RPC_NEW)
ev.call.server_accept(@server_queue, @server_finished_tag) ev.call.server_accept(@server_queue, @server_finished_tag)
ev.call.server_end_initial_metadata() ev.call.server_end_initial_metadata
ev.call.start_read(@server_tag) ev.call.start_read(@server_tag)
ev = @server_queue.pluck(@server_tag, TimeConsts::INFINITE_FUTURE) ev = @server_queue.pluck(@server_tag, TimeConsts::INFINITE_FUTURE)
expect(ev.type).to be(READ) expect(ev.type).to be(READ)
@ -79,10 +78,10 @@ shared_context 'setup: tags' do
ev = @server_queue.pluck(@server_tag, TimeConsts::INFINITE_FUTURE) ev = @server_queue.pluck(@server_tag, TimeConsts::INFINITE_FUTURE)
expect(ev).not_to be_nil expect(ev).not_to be_nil
expect(ev.type).to be(WRITE_ACCEPTED) expect(ev.type).to be(WRITE_ACCEPTED)
return ev.call ev.call
end end
def client_sends(call, sent='a message') def client_sends(call, sent = 'a message')
req = ByteBuffer.new(sent) req = ByteBuffer.new(sent)
call.start_invoke(@client_queue, @tag, @tag, @client_finished_tag) call.start_invoke(@client_queue, @tag, @tag, @client_finished_tag)
ev = @client_queue.pluck(@tag, TimeConsts::INFINITE_FUTURE) ev = @client_queue.pluck(@tag, TimeConsts::INFINITE_FUTURE)
@ -92,17 +91,15 @@ shared_context 'setup: tags' do
ev = @client_queue.pluck(@tag, TimeConsts::INFINITE_FUTURE) ev = @client_queue.pluck(@tag, TimeConsts::INFINITE_FUTURE)
expect(ev).not_to be_nil expect(ev).not_to be_nil
expect(ev.type).to be(WRITE_ACCEPTED) expect(ev.type).to be(WRITE_ACCEPTED)
return sent sent
end end
def new_client_call def new_client_call
@ch.create_call('/method', 'localhost', deadline) @ch.create_call('/method', 'localhost', deadline)
end end
end end
shared_examples 'basic GRPC message delivery is OK' do shared_examples 'basic GRPC message delivery is OK' do
include_context 'setup: tags' include_context 'setup: tags'
it 'servers receive requests from clients and start responding' do it 'servers receive requests from clients and start responding' do
@ -126,7 +123,7 @@ shared_examples 'basic GRPC message delivery is OK' do
# the server response # the server response
server_call.start_write(reply, @server_tag) server_call.start_write(reply, @server_tag)
ev = expect_next_event_on(@server_queue, WRITE_ACCEPTED, @server_tag) expect_next_event_on(@server_queue, WRITE_ACCEPTED, @server_tag)
end end
it 'responses written by servers are received by the client' do it 'responses written by servers are received by the client' do
@ -135,15 +132,14 @@ shared_examples 'basic GRPC message delivery is OK' do
server_receives_and_responds_with('server_response') server_receives_and_responds_with('server_response')
call.start_read(@tag) call.start_read(@tag)
ev = expect_next_event_on(@client_queue, CLIENT_METADATA_READ, @tag) expect_next_event_on(@client_queue, CLIENT_METADATA_READ, @tag)
ev = expect_next_event_on(@client_queue, READ, @tag) ev = expect_next_event_on(@client_queue, READ, @tag)
expect(ev.result.to_s).to eq('server_response') expect(ev.result.to_s).to eq('server_response')
end end
it 'servers can ignore a client write and send a status' do it 'servers can ignore a client write and send a status' do
reply = ByteBuffer.new('the server payload')
call = new_client_call call = new_client_call
msg = client_sends(call) client_sends(call)
# check the server rpc new was received # check the server rpc new was received
@server.request_call(@server_tag) @server.request_call(@server_tag)
@ -153,20 +149,20 @@ shared_examples 'basic GRPC message delivery is OK' do
# accept the call - need to do this to sent status. # accept the call - need to do this to sent status.
server_call = ev.call server_call = ev.call
server_call.server_accept(@server_queue, @server_finished_tag) server_call.server_accept(@server_queue, @server_finished_tag)
server_call.server_end_initial_metadata() server_call.server_end_initial_metadata
server_call.start_write_status(StatusCodes::NOT_FOUND, 'not found', server_call.start_write_status(StatusCodes::NOT_FOUND, 'not found',
@server_tag) @server_tag)
# client gets an empty response for the read, preceeded by some metadata. # client gets an empty response for the read, preceeded by some metadata.
call.start_read(@tag) call.start_read(@tag)
ev = expect_next_event_on(@client_queue, CLIENT_METADATA_READ, @tag) expect_next_event_on(@client_queue, CLIENT_METADATA_READ, @tag)
ev = expect_next_event_on(@client_queue, READ, @tag) ev = expect_next_event_on(@client_queue, READ, @tag)
expect(ev.tag).to be(@tag) expect(ev.tag).to be(@tag)
expect(ev.result.to_s).to eq('') expect(ev.result.to_s).to eq('')
# finally, after client sends writes_done, they get the finished. # finally, after client sends writes_done, they get the finished.
call.writes_done(@tag) call.writes_done(@tag)
ev = expect_next_event_on(@client_queue, FINISH_ACCEPTED, @tag) expect_next_event_on(@client_queue, FINISH_ACCEPTED, @tag)
ev = expect_next_event_on(@client_queue, FINISHED, @client_finished_tag) ev = expect_next_event_on(@client_queue, FINISHED, @client_finished_tag)
expect(ev.result.code).to eq(StatusCodes::NOT_FOUND) expect(ev.result.code).to eq(StatusCodes::NOT_FOUND)
end end
@ -175,12 +171,12 @@ shared_examples 'basic GRPC message delivery is OK' do
call = new_client_call call = new_client_call
client_sends(call) client_sends(call)
server_call = server_receives_and_responds_with('server_response') server_call = server_receives_and_responds_with('server_response')
server_call.start_write_status(10101, 'status code is 10101', @server_tag) server_call.start_write_status(10_101, 'status code is 10101', @server_tag)
# first the client says writes are done # first the client says writes are done
call.start_read(@tag) call.start_read(@tag)
ev = expect_next_event_on(@client_queue, CLIENT_METADATA_READ, @tag) expect_next_event_on(@client_queue, CLIENT_METADATA_READ, @tag)
ev = expect_next_event_on(@client_queue, READ, @tag) expect_next_event_on(@client_queue, READ, @tag)
call.writes_done(@tag) call.writes_done(@tag)
# but nothing happens until the server sends a status # but nothing happens until the server sends a status
@ -192,24 +188,23 @@ shared_examples 'basic GRPC message delivery is OK' do
expect_next_event_on(@client_queue, FINISH_ACCEPTED, @tag) expect_next_event_on(@client_queue, FINISH_ACCEPTED, @tag)
ev = expect_next_event_on(@client_queue, FINISHED, @client_finished_tag) ev = expect_next_event_on(@client_queue, FINISHED, @client_finished_tag)
expect(ev.result.details).to eq('status code is 10101') expect(ev.result.details).to eq('status code is 10101')
expect(ev.result.code).to eq(10101) expect(ev.result.code).to eq(10_101)
end end
end end
shared_examples 'GRPC metadata delivery works OK' do shared_examples 'GRPC metadata delivery works OK' do
include_context 'setup: tags' include_context 'setup: tags'
describe 'from client => server' do describe 'from client => server' do
before(:example) do before(:example) do
n = 7 # arbitrary number of metadata n = 7 # arbitrary number of metadata
diff_keys = Hash[n.times.collect { |i| ['k%d' % i, 'v%d' % i] }] diff_keys_fn = proc { |i| [sprintf('k%d', i), sprintf('v%d', i)] }
null_vals = Hash[n.times.collect { |i| ['k%d' % i, 'v\0%d' % i] }] diff_keys = Hash[n.times.collect { |x| diff_keys_fn.call x }]
same_keys = Hash[n.times.collect { |i| ['k%d' % i, ['v%d' % i] * n] }] null_vals_fn = proc { |i| [sprintf('k%d', i), sprintf('v\0%d', i)] }
symbol_key = {:a_key => 'a val'} null_vals = Hash[n.times.collect { |x| null_vals_fn.call x }]
same_keys_fn = proc { |i| [sprintf('k%d', i), [sprintf('v%d', i)] * n] }
same_keys = Hash[n.times.collect { |x| same_keys_fn.call x }]
symbol_key = { a_key: 'a val' }
@valid_metadata = [diff_keys, same_keys, null_vals, symbol_key] @valid_metadata = [diff_keys, same_keys, null_vals, symbol_key]
@bad_keys = [] @bad_keys = []
@bad_keys << { Object.new => 'a value' } @bad_keys << { Object.new => 'a value' }
@ -239,28 +234,29 @@ shared_examples 'GRPC metadata delivery works OK' do
# Client begins a call OK # Client begins a call OK
call.start_invoke(@client_queue, @tag, @tag, @client_finished_tag) call.start_invoke(@client_queue, @tag, @tag, @client_finished_tag)
ev = expect_next_event_on(@client_queue, INVOKE_ACCEPTED, @tag) expect_next_event_on(@client_queue, INVOKE_ACCEPTED, @tag)
# ... server has all metadata available even though the client did not # ... server has all metadata available even though the client did not
# send a write # send a write
@server.request_call(@server_tag) @server.request_call(@server_tag)
ev = expect_next_event_on(@server_queue, SERVER_RPC_NEW, @server_tag) ev = expect_next_event_on(@server_queue, SERVER_RPC_NEW, @server_tag)
replace_symbols = Hash[md.each_pair.collect { |x,y| [x.to_s, y] }] replace_symbols = Hash[md.each_pair.collect { |x, y| [x.to_s, y] }]
result = ev.result.metadata result = ev.result.metadata
expect(result.merge(replace_symbols)).to eq(result) expect(result.merge(replace_symbols)).to eq(result)
end end
end end
end end
describe 'from server => client' do describe 'from server => client' do
before(:example) do before(:example) do
n = 7 # arbitrary number of metadata n = 7 # arbitrary number of metadata
diff_keys = Hash[n.times.collect { |i| ['k%d' % i, 'v%d' % i] }] diff_keys_fn = proc { |i| [sprintf('k%d', i), sprintf('v%d', i)] }
null_vals = Hash[n.times.collect { |i| ['k%d' % i, 'v\0%d' % i] }] diff_keys = Hash[n.times.collect { |x| diff_keys_fn.call x }]
same_keys = Hash[n.times.collect { |i| ['k%d' % i, ['v%d' % i] * n] }] null_vals_fn = proc { |i| [sprintf('k%d', i), sprintf('v\0%d', i)] }
symbol_key = {:a_key => 'a val'} null_vals = Hash[n.times.collect { |x| null_vals_fn.call x }]
same_keys_fn = proc { |i| [sprintf('k%d', i), [sprintf('v%d', i)] * n] }
same_keys = Hash[n.times.collect { |x| same_keys_fn.call x }]
symbol_key = { a_key: 'a val' }
@valid_metadata = [diff_keys, same_keys, null_vals, symbol_key] @valid_metadata = [diff_keys, same_keys, null_vals, symbol_key]
@bad_keys = [] @bad_keys = []
@bad_keys << { Object.new => 'a value' } @bad_keys << { Object.new => 'a value' }
@ -290,7 +286,7 @@ shared_examples 'GRPC metadata delivery works OK' do
# ... server accepts the call without adding metadata # ... server accepts the call without adding metadata
server_call.server_accept(@server_queue, @server_finished_tag) server_call.server_accept(@server_queue, @server_finished_tag)
server_call.server_end_initial_metadata() server_call.server_end_initial_metadata
# ... these server sends some data, allowing the metadata read # ... these server sends some data, allowing the metadata read
server_call.start_write(ByteBuffer.new('reply with metadata'), server_call.start_write(ByteBuffer.new('reply with metadata'),
@ -300,7 +296,7 @@ shared_examples 'GRPC metadata delivery works OK' do
# there is the HTTP status metadata, though there should not be any # there is the HTTP status metadata, though there should not be any
# TODO(temiola): update this with the bug number to be resolved # TODO(temiola): update this with the bug number to be resolved
ev = expect_next_event_on(@client_queue, CLIENT_METADATA_READ, @tag) ev = expect_next_event_on(@client_queue, CLIENT_METADATA_READ, @tag)
expect(ev.result).to eq({':status' => '200'}) expect(ev.result).to eq(':status' => '200')
end end
it 'sends all the pairs and status:200 when keys and values are valid' do it 'sends all the pairs and status:200 when keys and values are valid' do
@ -316,24 +312,19 @@ shared_examples 'GRPC metadata delivery works OK' do
# ... server adds metadata and accepts the call # ... server adds metadata and accepts the call
server_call.add_metadata(md) server_call.add_metadata(md)
server_call.server_accept(@server_queue, @server_finished_tag) server_call.server_accept(@server_queue, @server_finished_tag)
server_call.server_end_initial_metadata() server_call.server_end_initial_metadata
# Now the client can read the metadata # Now the client can read the metadata
ev = expect_next_event_on(@client_queue, CLIENT_METADATA_READ, @tag) ev = expect_next_event_on(@client_queue, CLIENT_METADATA_READ, @tag)
replace_symbols = Hash[md.each_pair.collect { |x,y| [x.to_s, y] }] replace_symbols = Hash[md.each_pair.collect { |x, y| [x.to_s, y] }]
replace_symbols[':status'] = '200' replace_symbols[':status'] = '200'
expect(ev.result).to eq(replace_symbols) expect(ev.result).to eq(replace_symbols)
end end
end end
end end
end end
describe 'the http client/server' do describe 'the http client/server' do
before(:example) do before(:example) do
port = find_unused_tcp_port port = find_unused_tcp_port
host = "localhost:#{port}" host = "localhost:#{port}"
@ -354,11 +345,9 @@ describe 'the http client/server' do
it_behaves_like 'GRPC metadata delivery works OK' do it_behaves_like 'GRPC metadata delivery works OK' do
end end
end end
describe 'the secure http client/server' do describe 'the secure http client/server' do
before(:example) do before(:example) do
certs = load_test_certs certs = load_test_certs
port = find_unused_tcp_port port = find_unused_tcp_port
@ -369,7 +358,7 @@ describe 'the secure http client/server' do
@server = GRPC::Core::Server.new(@server_queue, nil, server_creds) @server = GRPC::Core::Server.new(@server_queue, nil, server_creds)
@server.add_http2_port(host, true) @server.add_http2_port(host, true)
@server.start @server.start
args = {Channel::SSL_TARGET => 'foo.test.google.com'} args = { Channel::SSL_TARGET => 'foo.test.google.com' }
@ch = Channel.new(host, args, @ch = Channel.new(host, args,
GRPC::Core::Credentials.new(certs[0], nil, nil)) GRPC::Core::Credentials.new(certs[0], nil, nil))
end end
@ -383,5 +372,4 @@ describe 'the secure http client/server' do
it_behaves_like 'GRPC metadata delivery works OK' do it_behaves_like 'GRPC metadata delivery works OK' do
end end
end end

@ -30,7 +30,6 @@
require 'grpc' require 'grpc'
describe GRPC::Core::CompletionQueue do describe GRPC::Core::CompletionQueue do
describe '#new' do describe '#new' do
it 'is constructed successufully' do it 'is constructed successufully' do
expect { GRPC::Core::CompletionQueue.new }.not_to raise_error expect { GRPC::Core::CompletionQueue.new }.not_to raise_error
@ -53,7 +52,6 @@ describe GRPC::Core::CompletionQueue do
expect { ch.next(a_time) }.not_to raise_error expect { ch.next(a_time) }.not_to raise_error
end end
end end
end end
describe '#pluck' do describe '#pluck' do
@ -74,8 +72,5 @@ describe GRPC::Core::CompletionQueue do
expect { ch.pluck(tag, a_time) }.not_to raise_error expect { ch.pluck(tag, a_time) }.not_to raise_error
end end
end end
end end
end end

@ -29,7 +29,6 @@
require 'grpc' require 'grpc'
def load_test_certs def load_test_certs
test_root = File.join(File.dirname(__FILE__), 'testdata') test_root = File.join(File.dirname(__FILE__), 'testdata')
files = ['ca.pem', 'server1.pem', 'server1.key'] files = ['ca.pem', 'server1.pem', 'server1.key']
@ -39,9 +38,7 @@ end
Credentials = GRPC::Core::Credentials Credentials = GRPC::Core::Credentials
describe Credentials do describe Credentials do
describe '#new' do describe '#new' do
it 'can be constructed with fake inputs' do it 'can be constructed with fake inputs' do
expect { Credentials.new('root_certs', 'key', 'cert') }.not_to raise_error expect { Credentials.new('root_certs', 'key', 'cert') }.not_to raise_error
end end
@ -58,30 +55,23 @@ describe Credentials do
it 'cannot be constructed with a nil server roots' do it 'cannot be constructed with a nil server roots' do
_, client_key, client_chain = load_test_certs _, client_key, client_chain = load_test_certs
blk = Proc.new { Credentials.new(nil, client_key, client_chain) } blk = proc { Credentials.new(nil, client_key, client_chain) }
expect(&blk).to raise_error expect(&blk).to raise_error
end end
end end
describe '#compose' do describe '#compose' do
it 'can be completed OK' do it 'can be completed OK' do
certs = load_test_certs certs = load_test_certs
cred1 = Credentials.new(*certs) cred1 = Credentials.new(*certs)
cred2 = Credentials.new(*certs) cred2 = Credentials.new(*certs)
expect { cred1.compose(cred2) }.to_not raise_error expect { cred1.compose(cred2) }.to_not raise_error
end end
end end
describe 'Credentials#default' do describe 'Credentials#default' do
it 'is not implemented yet' do it 'is not implemented yet' do
expect { Credentials.default() }.to raise_error RuntimeError expect { Credentials.default }.to raise_error RuntimeError
end end
end end
end end

@ -30,25 +30,23 @@
require 'grpc' require 'grpc'
describe GRPC::Core::CompletionType do describe GRPC::Core::CompletionType do
before(:each) do before(:each) do
@known_types = { @known_types = {
:QUEUE_SHUTDOWN => 0, QUEUE_SHUTDOWN: 0,
:READ => 1, READ: 1,
:INVOKE_ACCEPTED => 2, INVOKE_ACCEPTED: 2,
:WRITE_ACCEPTED => 3, WRITE_ACCEPTED: 3,
:FINISH_ACCEPTED => 4, FINISH_ACCEPTED: 4,
:CLIENT_METADATA_READ => 5, CLIENT_METADATA_READ: 5,
:FINISHED => 6, FINISHED: 6,
:SERVER_RPC_NEW => 7, SERVER_RPC_NEW: 7,
:RESERVED => 8 RESERVED: 8
} }
end end
it 'should have all the known types' do it 'should have all the known types' do
mod = GRPC::Core::CompletionType mod = GRPC::Core::CompletionType
blk = Proc.new { Hash[mod.constants.collect { |c| [c, mod.const_get(c)] }] } blk = proc { Hash[mod.constants.collect { |c| [c, mod.const_get(c)] }] }
expect(blk.call).to eq(@known_types) expect(blk.call).to eq(@known_types)
end end
end end

@ -38,9 +38,9 @@ describe GRPC::ActiveCall do
CompletionType = GRPC::Core::CompletionType CompletionType = GRPC::Core::CompletionType
before(:each) do before(:each) do
@pass_through = Proc.new { |x| x } @pass_through = proc { |x| x }
@server_tag = Object.new @server_tag = Object.new
@server_done_tag, meta_tag = Object.new @server_done_tag = Object.new
@tag = Object.new @tag = Object.new
@client_queue = GRPC::Core::CompletionQueue.new @client_queue = GRPC::Core::CompletionQueue.new
@ -70,7 +70,7 @@ describe GRPC::ActiveCall do
describe '#multi_req_view' do describe '#multi_req_view' do
it 'exposes a fixed subset of the ActiveCall methods' do it 'exposes a fixed subset of the ActiveCall methods' do
want = ['cancelled', 'deadline', 'each_remote_read', 'shutdown'] want = %w(cancelled, deadline, each_remote_read, shutdown)
v = @client_call.multi_req_view v = @client_call.multi_req_view
want.each do |w| want.each do |w|
expect(v.methods.include?(w)) expect(v.methods.include?(w))
@ -80,7 +80,7 @@ describe GRPC::ActiveCall do
describe '#single_req_view' do describe '#single_req_view' do
it 'exposes a fixed subset of the ActiveCall methods' do it 'exposes a fixed subset of the ActiveCall methods' do
want = ['cancelled', 'deadline', 'shutdown'] want = %w(cancelled, deadline, shutdown)
v = @client_call.single_req_view v = @client_call.single_req_view
want.each do |w| want.each do |w|
expect(v.methods.include?(w)) expect(v.methods.include?(w))
@ -110,7 +110,7 @@ describe GRPC::ActiveCall do
# Accept the call, and verify that the server reads the response ok. # Accept the call, and verify that the server reads the response ok.
ev.call.server_accept(@client_queue, @server_tag) ev.call.server_accept(@client_queue, @server_tag)
ev.call.server_end_initial_metadata() ev.call.server_end_initial_metadata
server_call = ActiveCall.new(ev.call, @client_queue, @pass_through, server_call = ActiveCall.new(ev.call, @client_queue, @pass_through,
@pass_through, deadline) @pass_through, deadline)
expect(server_call.remote_read).to eq(msg) expect(server_call.remote_read).to eq(msg)
@ -120,7 +120,7 @@ describe GRPC::ActiveCall do
call = make_test_call call = make_test_call
done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue, done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue,
deadline) deadline)
marshal = Proc.new { |x| 'marshalled:' + x } marshal = proc { |x| 'marshalled:' + x }
client_call = ActiveCall.new(call, @client_queue, marshal, client_call = ActiveCall.new(call, @client_queue, marshal,
@pass_through, deadline, @pass_through, deadline,
finished_tag: done_tag, finished_tag: done_tag,
@ -132,33 +132,29 @@ describe GRPC::ActiveCall do
@server.request_call(@server_tag) @server.request_call(@server_tag)
ev = @server_queue.next(deadline) ev = @server_queue.next(deadline)
ev.call.server_accept(@client_queue, @server_tag) ev.call.server_accept(@client_queue, @server_tag)
ev.call.server_end_initial_metadata() ev.call.server_end_initial_metadata
server_call = ActiveCall.new(ev.call, @client_queue, @pass_through, server_call = ActiveCall.new(ev.call, @client_queue, @pass_through,
@pass_through, deadline) @pass_through, deadline)
expect(server_call.remote_read).to eq('marshalled:' + msg) expect(server_call.remote_read).to eq('marshalled:' + msg)
end end
end end
describe '#client_start_invoke' do describe '#client_start_invoke' do
it 'sends keywords as metadata to the server when the are present' do it 'sends keywords as metadata to the server when the are present' do
call, pass_through = make_test_call, Proc.new { |x| x } call = make_test_call
done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue, ActiveCall.client_start_invoke(call, @client_queue, deadline,
deadline, k1: 'v1', k1: 'v1', k2: 'v2')
k2: 'v2')
@server.request_call(@server_tag) @server.request_call(@server_tag)
ev = @server_queue.next(deadline) ev = @server_queue.next(deadline)
expect(ev).to_not be_nil expect(ev).to_not be_nil
expect(ev.result.metadata['k1']).to eq('v1') expect(ev.result.metadata['k1']).to eq('v1')
expect(ev.result.metadata['k2']).to eq('v2') expect(ev.result.metadata['k2']).to eq('v2')
end end
end end
describe '#remote_read' do describe '#remote_read' do
it 'reads the response sent by a server' do it 'reads the response sent by a server' do
call, pass_through = make_test_call, Proc.new { |x| x } call = make_test_call
done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue, done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue,
deadline) deadline)
client_call = ActiveCall.new(call, @client_queue, @pass_through, client_call = ActiveCall.new(call, @client_queue, @pass_through,
@ -173,7 +169,7 @@ describe GRPC::ActiveCall do
end end
it 'saves metadata { status=200 } when the server adds no metadata' do it 'saves metadata { status=200 } when the server adds no metadata' do
call, pass_through = make_test_call, Proc.new { |x| x } call = make_test_call
done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue, done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue,
deadline) deadline)
client_call = ActiveCall.new(call, @client_queue, @pass_through, client_call = ActiveCall.new(call, @client_queue, @pass_through,
@ -186,11 +182,11 @@ describe GRPC::ActiveCall do
server_call.remote_send('ignore me') server_call.remote_send('ignore me')
expect(client_call.metadata).to be_nil expect(client_call.metadata).to be_nil
client_call.remote_read client_call.remote_read
expect(client_call.metadata).to eq({':status' => '200'}) expect(client_call.metadata).to eq(':status' => '200')
end end
it 'saves metadata add by the server' do it 'saves metadata add by the server' do
call, pass_through = make_test_call, Proc.new { |x| x } call = make_test_call
done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue, done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue,
deadline) deadline)
client_call = ActiveCall.new(call, @client_queue, @pass_through, client_call = ActiveCall.new(call, @client_queue, @pass_through,
@ -203,13 +199,12 @@ describe GRPC::ActiveCall do
server_call.remote_send('ignore me') server_call.remote_send('ignore me')
expect(client_call.metadata).to be_nil expect(client_call.metadata).to be_nil
client_call.remote_read client_call.remote_read
expect(client_call.metadata).to eq({':status' => '200', 'k1' => 'v1', expected = { ':status' => '200', 'k1' => 'v1', 'k2' => 'v2' }
'k2' => 'v2'}) expect(client_call.metadata).to eq(expected)
end end
it 'get a nil msg before a status when an OK status is sent' do it 'get a nil msg before a status when an OK status is sent' do
call, pass_through = make_test_call, Proc.new { |x| x } call = make_test_call
done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue, done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue,
deadline) deadline)
client_call = ActiveCall.new(call, @client_queue, @pass_through, client_call = ActiveCall.new(call, @client_queue, @pass_through,
@ -227,12 +222,11 @@ describe GRPC::ActiveCall do
expect(res).to be_nil expect(res).to be_nil
end end
it 'unmarshals the response using the unmarshal func' do it 'unmarshals the response using the unmarshal func' do
call = make_test_call call = make_test_call
done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue, done_tag, meta_tag = ActiveCall.client_start_invoke(call, @client_queue,
deadline) deadline)
unmarshal = Proc.new { |x| 'unmarshalled:' + x } unmarshal = proc { |x| 'unmarshalled:' + x }
client_call = ActiveCall.new(call, @client_queue, @pass_through, client_call = ActiveCall.new(call, @client_queue, @pass_through,
unmarshal, deadline, unmarshal, deadline,
finished_tag: done_tag, finished_tag: done_tag,
@ -245,7 +239,6 @@ describe GRPC::ActiveCall do
server_call.remote_send('server_response') server_call.remote_send('server_response')
expect(client_call.remote_read).to eq('unmarshalled:server_response') expect(client_call.remote_read).to eq('unmarshalled:server_response')
end end
end end
describe '#each_remote_read' do describe '#each_remote_read' do
@ -298,7 +291,6 @@ describe GRPC::ActiveCall do
server_call.send_status(OK, 'OK') server_call.send_status(OK, 'OK')
expect { e.next }.to raise_error(StopIteration) expect { e.next }.to raise_error(StopIteration)
end end
end end
describe '#writes_done' do describe '#writes_done' do
@ -357,7 +349,6 @@ describe GRPC::ActiveCall do
expect { client_call.writes_done(true) }.to_not raise_error expect { client_call.writes_done(true) }.to_not raise_error
expect { server_call.finished }.to_not raise_error expect { server_call.finished }.to_not raise_error
end end
end end
def expect_server_to_receive(sent_text, **kw) def expect_server_to_receive(sent_text, **kw)
@ -371,7 +362,7 @@ describe GRPC::ActiveCall do
ev = @server_queue.next(deadline) ev = @server_queue.next(deadline)
ev.call.add_metadata(kw) ev.call.add_metadata(kw)
ev.call.server_accept(@client_queue, @server_done_tag) ev.call.server_accept(@client_queue, @server_done_tag)
ev.call.server_end_initial_metadata() ev.call.server_end_initial_metadata
ActiveCall.new(ev.call, @client_queue, @pass_through, ActiveCall.new(ev.call, @client_queue, @pass_through,
@pass_through, deadline, @pass_through, deadline,
finished_tag: @server_done_tag) finished_tag: @server_done_tag)
@ -384,5 +375,4 @@ describe GRPC::ActiveCall do
def deadline def deadline
Time.now + 0.25 # in 0.25 seconds; arbitrary Time.now + 0.25 # in 0.25 seconds; arbitrary
end end
end end

@ -31,7 +31,7 @@ require 'grpc'
require 'xray/thread_dump_signal_handler' require 'xray/thread_dump_signal_handler'
require_relative '../port_picker' require_relative '../port_picker'
NOOP = Proc.new { |x| x } NOOP = proc { |x| x }
def wakey_thread(&blk) def wakey_thread(&blk)
awake_mutex, awake_cond = Mutex.new, ConditionVariable.new awake_mutex, awake_cond = Mutex.new, ConditionVariable.new
@ -52,7 +52,6 @@ include GRPC::Core::StatusCodes
include GRPC::Core::TimeConsts include GRPC::Core::TimeConsts
describe 'ClientStub' do describe 'ClientStub' do
before(:each) do before(:each) do
Thread.abort_on_exception = true Thread.abort_on_exception = true
@server = nil @server = nil
@ -67,11 +66,10 @@ describe 'ClientStub' do
end end
describe '#new' do describe '#new' do
it 'can be created from a host and args' do it 'can be created from a host and args' do
host = new_test_host host = new_test_host
opts = {:a_channel_arg => 'an_arg'} opts = { a_channel_arg: 'an_arg' }
blk = Proc.new do blk = proc do
GRPC::ClientStub.new(host, @cq, **opts) GRPC::ClientStub.new(host, @cq, **opts)
end end
expect(&blk).not_to raise_error expect(&blk).not_to raise_error
@ -79,8 +77,8 @@ describe 'ClientStub' do
it 'can be created with a default deadline' do it 'can be created with a default deadline' do
host = new_test_host host = new_test_host
opts = {:a_channel_arg => 'an_arg', :deadline => 5} opts = { a_channel_arg: 'an_arg', deadline: 5 }
blk = Proc.new do blk = proc do
GRPC::ClientStub.new(host, @cq, **opts) GRPC::ClientStub.new(host, @cq, **opts)
end end
expect(&blk).not_to raise_error expect(&blk).not_to raise_error
@ -88,8 +86,8 @@ describe 'ClientStub' do
it 'can be created with an channel override' do it 'can be created with an channel override' do
host = new_test_host host = new_test_host
opts = {:a_channel_arg => 'an_arg', :channel_override => @ch} opts = { a_channel_arg: 'an_arg', channel_override: @ch }
blk = Proc.new do blk = proc do
GRPC::ClientStub.new(host, @cq, **opts) GRPC::ClientStub.new(host, @cq, **opts)
end end
expect(&blk).not_to raise_error expect(&blk).not_to raise_error
@ -97,8 +95,8 @@ describe 'ClientStub' do
it 'cannot be created with a bad channel override' do it 'cannot be created with a bad channel override' do
host = new_test_host host = new_test_host
blk = Proc.new do blk = proc do
opts = {:a_channel_arg => 'an_arg', :channel_override => Object.new} opts = { a_channel_arg: 'an_arg', channel_override: Object.new }
GRPC::ClientStub.new(host, @cq, **opts) GRPC::ClientStub.new(host, @cq, **opts)
end end
expect(&blk).to raise_error expect(&blk).to raise_error
@ -106,8 +104,8 @@ describe 'ClientStub' do
it 'cannot be created with bad credentials' do it 'cannot be created with bad credentials' do
host = new_test_host host = new_test_host
blk = Proc.new do blk = proc do
opts = {:a_channel_arg => 'an_arg', :creds => Object.new} opts = { a_channel_arg: 'an_arg', creds: Object.new }
GRPC::ClientStub.new(host, @cq, **opts) GRPC::ClientStub.new(host, @cq, **opts)
end end
expect(&blk).to raise_error expect(&blk).to raise_error
@ -116,17 +114,16 @@ describe 'ClientStub' do
it 'can be created with test test credentials' do it 'can be created with test test credentials' do
certs = load_test_certs certs = load_test_certs
host = new_test_host host = new_test_host
blk = Proc.new do blk = proc do
opts = { opts = {
GRPC::Core::Channel::SSL_TARGET => 'foo.test.google.com', GRPC::Core::Channel::SSL_TARGET => 'foo.test.google.com',
:a_channel_arg => 'an_arg', a_channel_arg: 'an_arg',
:creds => GRPC::Core::Credentials.new(certs[0], nil, nil) creds: GRPC::Core::Credentials.new(certs[0], nil, nil)
} }
GRPC::ClientStub.new(host, @cq, **opts) GRPC::ClientStub.new(host, @cq, **opts)
end end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
end end
describe '#request_response' do describe '#request_response' do
@ -135,7 +132,6 @@ describe 'ClientStub' do
end end
shared_examples 'request response' do shared_examples 'request response' do
it 'should send a request to/receive a reply from a server' do it 'should send a request to/receive a reply from a server' do
host = new_test_host host = new_test_host
th = run_request_response(host, @sent_msg, @resp, @pass) th = run_request_response(host, @sent_msg, @resp, @pass)
@ -146,8 +142,8 @@ describe 'ClientStub' do
it 'should send metadata to the server ok' do it 'should send metadata to the server ok' do
host = new_test_host host = new_test_host
th = run_request_response(host, @sent_msg, @resp, @pass, k1: 'v1', th = run_request_response(host, @sent_msg, @resp, @pass,
k2: 'v2') k1: 'v1', k2: 'v2')
stub = GRPC::ClientStub.new(host, @cq) stub = GRPC::ClientStub.new(host, @cq)
expect(get_response(stub)).to eq(@resp) expect(get_response(stub)).to eq(@resp)
th.join th.join
@ -157,7 +153,10 @@ describe 'ClientStub' do
host = new_test_host host = new_test_host
th = run_request_response(host, @sent_msg, @resp, @pass, th = run_request_response(host, @sent_msg, @resp, @pass,
k1: 'updated-v1', k2: 'v2') k1: 'updated-v1', k2: 'v2')
update_md = Proc.new { |md| md[:k1] = 'updated-v1'; md } update_md = proc do |md|
md[:k1] = 'updated-v1'
md
end
stub = GRPC::ClientStub.new(host, @cq, update_metadata: update_md) stub = GRPC::ClientStub.new(host, @cq, update_metadata: update_md)
expect(get_response(stub)).to eq(@resp) expect(get_response(stub)).to eq(@resp)
th.join th.join
@ -167,7 +166,7 @@ describe 'ClientStub' do
alt_host = new_test_host alt_host = new_test_host
th = run_request_response(alt_host, @sent_msg, @resp, @pass) th = run_request_response(alt_host, @sent_msg, @resp, @pass)
ch = GRPC::Core::Channel.new(alt_host, nil) ch = GRPC::Core::Channel.new(alt_host, nil)
stub = GRPC::ClientStub.new('ignored-host', @cq, channel_override:ch) stub = GRPC::ClientStub.new('ignored-host', @cq, channel_override: ch)
expect(get_response(stub)).to eq(@resp) expect(get_response(stub)).to eq(@resp)
th.join th.join
end end
@ -176,45 +175,37 @@ describe 'ClientStub' do
host = new_test_host host = new_test_host
th = run_request_response(host, @sent_msg, @resp, @fail) th = run_request_response(host, @sent_msg, @resp, @fail)
stub = GRPC::ClientStub.new(host, @cq) stub = GRPC::ClientStub.new(host, @cq)
blk = Proc.new { get_response(stub) } blk = proc { get_response(stub) }
expect(&blk).to raise_error(GRPC::BadStatus) expect(&blk).to raise_error(GRPC::BadStatus)
th.join th.join
end end
end end
describe 'without a call operation' do describe 'without a call operation' do
def get_response(stub) def get_response(stub)
stub.request_response(@method, @sent_msg, NOOP, NOOP, k1: 'v1', stub.request_response(@method, @sent_msg, NOOP, NOOP,
k2: 'v2') k1: 'v1', k2: 'v2')
end end
it_behaves_like 'request response' it_behaves_like 'request response'
end end
describe 'via a call operation' do describe 'via a call operation' do
def get_response(stub) def get_response(stub)
op = stub.request_response(@method, @sent_msg, NOOP, NOOP, op = stub.request_response(@method, @sent_msg, NOOP, NOOP,
return_op:true, k1: 'v1', k2: 'v2') return_op: true, k1: 'v1', k2: 'v2')
expect(op).to be_a(GRPC::ActiveCall::Operation) expect(op).to be_a(GRPC::ActiveCall::Operation)
op.execute() op.execute
end end
it_behaves_like 'request response' it_behaves_like 'request response'
end end
end end
describe '#client_streamer' do describe '#client_streamer' do
shared_examples 'client streaming' do shared_examples 'client streaming' do
before(:each) do before(:each) do
@sent_msgs = Array.new(3) { |i| 'msg_' + (i+1).to_s } @sent_msgs = Array.new(3) { |i| 'msg_' + (i + 1).to_s }
@resp = 'a_reply' @resp = 'a_reply'
end end
@ -228,19 +219,21 @@ describe 'ClientStub' do
it 'should send metadata to the server ok' do it 'should send metadata to the server ok' do
host = new_test_host host = new_test_host
th = run_client_streamer(host, @sent_msgs, @resp, @pass, k1: 'v1', th = run_client_streamer(host, @sent_msgs, @resp, @pass,
k2: 'v2') k1: 'v1', k2: 'v2')
stub = GRPC::ClientStub.new(host, @cq) stub = GRPC::ClientStub.new(host, @cq)
expect(get_response(stub)).to eq(@resp) expect(get_response(stub)).to eq(@resp)
th.join th.join
end end
it 'should update the sent metadata with a provided metadata updater' do it 'should update the sent metadata with a provided metadata updater' do
host = new_test_host host = new_test_host
th = run_client_streamer(host, @sent_msgs, @resp, @pass, th = run_client_streamer(host, @sent_msgs, @resp, @pass,
k1: 'updated-v1', k2: 'v2') k1: 'updated-v1', k2: 'v2')
update_md = Proc.new { |md| md[:k1] = 'updated-v1'; md } update_md = proc do |md|
md[:k1] = 'updated-v1'
md
end
stub = GRPC::ClientStub.new(host, @cq, update_metadata: update_md) stub = GRPC::ClientStub.new(host, @cq, update_metadata: update_md)
expect(get_response(stub)).to eq(@resp) expect(get_response(stub)).to eq(@resp)
th.join th.join
@ -250,46 +243,38 @@ describe 'ClientStub' do
host = new_test_host host = new_test_host
th = run_client_streamer(host, @sent_msgs, @resp, @fail) th = run_client_streamer(host, @sent_msgs, @resp, @fail)
stub = GRPC::ClientStub.new(host, @cq) stub = GRPC::ClientStub.new(host, @cq)
blk = Proc.new { get_response(stub) } blk = proc { get_response(stub) }
expect(&blk).to raise_error(GRPC::BadStatus) expect(&blk).to raise_error(GRPC::BadStatus)
th.join th.join
end end
end end
describe 'without a call operation' do describe 'without a call operation' do
def get_response(stub) def get_response(stub)
stub.client_streamer(@method, @sent_msgs, NOOP, NOOP, k1: 'v1', stub.client_streamer(@method, @sent_msgs, NOOP, NOOP,
k2: 'v2') k1: 'v1', k2: 'v2')
end end
it_behaves_like 'client streaming' it_behaves_like 'client streaming'
end end
describe 'via a call operation' do describe 'via a call operation' do
def get_response(stub) def get_response(stub)
op = stub.client_streamer(@method, @sent_msgs, NOOP, NOOP, op = stub.client_streamer(@method, @sent_msgs, NOOP, NOOP,
return_op:true, k1: 'v1', k2: 'v2') return_op: true, k1: 'v1', k2: 'v2')
expect(op).to be_a(GRPC::ActiveCall::Operation) expect(op).to be_a(GRPC::ActiveCall::Operation)
resp = op.execute() op.execute
end end
it_behaves_like 'client streaming' it_behaves_like 'client streaming'
end end
end end
describe '#server_streamer' do describe '#server_streamer' do
shared_examples 'server streaming' do shared_examples 'server streaming' do
before(:each) do before(:each) do
@sent_msg = 'a_msg' @sent_msg = 'a_msg'
@replys = Array.new(3) { |i| 'reply_' + (i+1).to_s } @replys = Array.new(3) { |i| 'reply_' + (i + 1).to_s }
end end
it 'should send a request to/receive replies from a server' do it 'should send a request to/receive replies from a server' do
@ -311,8 +296,8 @@ describe 'ClientStub' do
it 'should send metadata to the server ok' do it 'should send metadata to the server ok' do
host = new_test_host host = new_test_host
th = run_server_streamer(host, @sent_msg, @replys, @fail, k1: 'v1', th = run_server_streamer(host, @sent_msg, @replys, @fail,
k2: 'v2') k1: 'v1', k2: 'v2')
stub = GRPC::ClientStub.new(host, @cq) stub = GRPC::ClientStub.new(host, @cq)
e = get_responses(stub) e = get_responses(stub)
expect { e.collect { |r| r } }.to raise_error(GRPC::BadStatus) expect { e.collect { |r| r } }.to raise_error(GRPC::BadStatus)
@ -323,55 +308,50 @@ describe 'ClientStub' do
host = new_test_host host = new_test_host
th = run_server_streamer(host, @sent_msg, @replys, @pass, th = run_server_streamer(host, @sent_msg, @replys, @pass,
k1: 'updated-v1', k2: 'v2') k1: 'updated-v1', k2: 'v2')
update_md = Proc.new { |md| md[:k1] = 'updated-v1'; md } update_md = proc do |md|
md[:k1] = 'updated-v1'
md
end
stub = GRPC::ClientStub.new(host, @cq, update_metadata: update_md) stub = GRPC::ClientStub.new(host, @cq, update_metadata: update_md)
e = get_responses(stub) e = get_responses(stub)
expect(e.collect { |r| r }).to eq(@replys) expect(e.collect { |r| r }).to eq(@replys)
th.join th.join
end end
end end
describe 'without a call operation' do describe 'without a call operation' do
def get_responses(stub) def get_responses(stub)
e = stub.server_streamer(@method, @sent_msg, NOOP, NOOP, k1: 'v1', e = stub.server_streamer(@method, @sent_msg, NOOP, NOOP,
k2: 'v2') k1: 'v1', k2: 'v2')
expect(e).to be_a(Enumerator) expect(e).to be_a(Enumerator)
e e
end end
it_behaves_like 'server streaming' it_behaves_like 'server streaming'
end end
describe 'via a call operation' do describe 'via a call operation' do
def get_responses(stub) def get_responses(stub)
op = stub.server_streamer(@method, @sent_msg, NOOP, NOOP, op = stub.server_streamer(@method, @sent_msg, NOOP, NOOP,
return_op:true, k1: 'v1', k2: 'v2') return_op: true, k1: 'v1', k2: 'v2')
expect(op).to be_a(GRPC::ActiveCall::Operation) expect(op).to be_a(GRPC::ActiveCall::Operation)
e = op.execute() e = op.execute
expect(e).to be_a(Enumerator) expect(e).to be_a(Enumerator)
e e
end end
it_behaves_like 'server streaming' it_behaves_like 'server streaming'
end end
end end
describe '#bidi_streamer' do describe '#bidi_streamer' do
shared_examples 'bidi streaming' do shared_examples 'bidi streaming' do
before(:each) do before(:each) do
@sent_msgs = Array.new(3) { |i| 'msg_' + (i+1).to_s } @sent_msgs = Array.new(3) { |i| 'msg_' + (i + 1).to_s }
@replys = Array.new(3) { |i| 'reply_' + (i+1).to_s } @replys = Array.new(3) { |i| 'reply_' + (i + 1).to_s }
end end
it 'supports sending all the requests first', :bidi => true do it 'supports sending all the requests first', bidi: true do
host = new_test_host host = new_test_host
th = run_bidi_streamer_handle_inputs_first(host, @sent_msgs, @replys, th = run_bidi_streamer_handle_inputs_first(host, @sent_msgs, @replys,
@pass) @pass)
@ -381,7 +361,7 @@ describe 'ClientStub' do
th.join th.join
end end
it 'supports client-initiated ping pong', :bidi => true do it 'supports client-initiated ping pong', bidi: true do
host = new_test_host host = new_test_host
th = run_bidi_streamer_echo_ping_pong(host, @sent_msgs, @pass, true) th = run_bidi_streamer_echo_ping_pong(host, @sent_msgs, @pass, true)
stub = GRPC::ClientStub.new(host, @cq) stub = GRPC::ClientStub.new(host, @cq)
@ -396,7 +376,7 @@ describe 'ClientStub' do
# servers don't know if all the client metadata has been sent until # servers don't know if all the client metadata has been sent until
# they receive a message from the client. Without receiving all the # they receive a message from the client. Without receiving all the
# metadata, the server does not accept the call, so this test hangs. # metadata, the server does not accept the call, so this test hangs.
xit 'supports a server-initiated ping pong', :bidi => true do xit 'supports a server-initiated ping pong', bidi: true do
host = new_test_host host = new_test_host
th = run_bidi_streamer_echo_ping_pong(host, @sent_msgs, @pass, false) th = run_bidi_streamer_echo_ping_pong(host, @sent_msgs, @pass, false)
stub = GRPC::ClientStub.new(host, @cq) stub = GRPC::ClientStub.new(host, @cq)
@ -404,11 +384,9 @@ describe 'ClientStub' do
expect(e.collect { |r| r }).to eq(@sent_msgs) expect(e.collect { |r| r }).to eq(@sent_msgs)
th.join th.join
end end
end end
describe 'without a call operation' do describe 'without a call operation' do
def get_responses(stub) def get_responses(stub)
e = stub.bidi_streamer(@method, @sent_msgs, NOOP, NOOP) e = stub.bidi_streamer(@method, @sent_msgs, NOOP, NOOP)
expect(e).to be_a(Enumerator) expect(e).to be_a(Enumerator)
@ -416,13 +394,12 @@ describe 'ClientStub' do
end end
it_behaves_like 'bidi streaming' it_behaves_like 'bidi streaming'
end end
describe 'via a call operation' do describe 'via a call operation' do
def get_responses(stub) def get_responses(stub)
op = stub.bidi_streamer(@method, @sent_msgs, NOOP, NOOP, return_op:true) op = stub.bidi_streamer(@method, @sent_msgs, NOOP, NOOP,
return_op: true)
expect(op).to be_a(GRPC::ActiveCall::Operation) expect(op).to be_a(GRPC::ActiveCall::Operation)
e = op.execute e = op.execute
expect(e).to be_a(Enumerator) expect(e).to be_a(Enumerator)
@ -430,9 +407,7 @@ describe 'ClientStub' do
end end
it_behaves_like 'bidi streaming' it_behaves_like 'bidi streaming'
end end
end end
def run_server_streamer(hostname, expected_input, replys, status, **kw) def run_server_streamer(hostname, expected_input, replys, status, **kw)
@ -514,14 +489,13 @@ describe 'ClientStub' do
def expect_server_to_be_invoked(hostname, awake_mutex, awake_cond) def expect_server_to_be_invoked(hostname, awake_mutex, awake_cond)
server_queue = start_test_server(hostname, awake_mutex, awake_cond) server_queue = start_test_server(hostname, awake_mutex, awake_cond)
test_deadline = Time.now + 10 # fail tests after 10 seconds
ev = server_queue.pluck(@server_tag, INFINITE_FUTURE) ev = server_queue.pluck(@server_tag, INFINITE_FUTURE)
raise OutOfTime if ev.nil? fail OutOfTime if ev.nil?
server_call = ev.call server_call = ev.call
server_call.metadata = ev.result.metadata server_call.metadata = ev.result.metadata
finished_tag = Object.new finished_tag = Object.new
server_call.server_accept(server_queue, finished_tag) server_call.server_accept(server_queue, finished_tag)
server_call.server_end_initial_metadata() server_call.server_end_initial_metadata
GRPC::ActiveCall.new(server_call, server_queue, NOOP, NOOP, INFINITE_FUTURE, GRPC::ActiveCall.new(server_call, server_queue, NOOP, NOOP, INFINITE_FUTURE,
finished_tag: finished_tag) finished_tag: finished_tag)
end end
@ -530,5 +504,4 @@ describe 'ClientStub' do
port = find_unused_tcp_port port = find_unused_tcp_port
"localhost:#{port}" "localhost:#{port}"
end end
end end

@ -30,9 +30,7 @@
require 'grpc' require 'grpc'
require 'grpc/generic/rpc_desc' require 'grpc/generic/rpc_desc'
describe GRPC::RpcDesc do describe GRPC::RpcDesc do
RpcDesc = GRPC::RpcDesc RpcDesc = GRPC::RpcDesc
Stream = RpcDesc::Stream Stream = RpcDesc::Stream
OK = GRPC::Core::StatusCodes::OK OK = GRPC::Core::StatusCodes::OK
@ -56,7 +54,6 @@ describe GRPC::RpcDesc do
end end
describe '#run_server_method' do describe '#run_server_method' do
describe 'for request responses' do describe 'for request responses' do
before(:each) do before(:each) do
@call = double('active_call') @call = double('active_call')
@ -78,7 +75,7 @@ describe GRPC::RpcDesc do
it 'absorbs EventError with no further action' do it 'absorbs EventError with no further action' do
expect(@call).to receive(:remote_read).once.and_raise(EventError) expect(@call).to receive(:remote_read).once.and_raise(EventError)
blk = Proc.new do blk = proc do
@request_response.run_server_method(@call, method(:fake_reqresp)) @request_response.run_server_method(@call, method(:fake_reqresp))
end end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
@ -86,7 +83,7 @@ describe GRPC::RpcDesc do
it 'absorbs CallError with no further action' do it 'absorbs CallError with no further action' do
expect(@call).to receive(:remote_read).once.and_raise(CallError) expect(@call).to receive(:remote_read).once.and_raise(CallError)
blk = Proc.new do blk = proc do
@request_response.run_server_method(@call, method(:fake_reqresp)) @request_response.run_server_method(@call, method(:fake_reqresp))
end end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
@ -100,7 +97,6 @@ describe GRPC::RpcDesc do
expect(@call).to receive(:finished).once expect(@call).to receive(:finished).once
@request_response.run_server_method(@call, method(:fake_reqresp)) @request_response.run_server_method(@call, method(:fake_reqresp))
end end
end end
describe 'for client streamers' do describe 'for client streamers' do
@ -122,7 +118,7 @@ describe GRPC::RpcDesc do
it 'absorbs EventError with no further action' do it 'absorbs EventError with no further action' do
expect(@call).to receive(:remote_send).once.and_raise(EventError) expect(@call).to receive(:remote_send).once.and_raise(EventError)
blk = Proc.new do blk = proc do
@client_streamer.run_server_method(@call, method(:fake_clstream)) @client_streamer.run_server_method(@call, method(:fake_clstream))
end end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
@ -130,20 +126,18 @@ describe GRPC::RpcDesc do
it 'absorbs CallError with no further action' do it 'absorbs CallError with no further action' do
expect(@call).to receive(:remote_send).once.and_raise(CallError) expect(@call).to receive(:remote_send).once.and_raise(CallError)
blk = Proc.new do blk = proc do
@client_streamer.run_server_method(@call, method(:fake_clstream)) @client_streamer.run_server_method(@call, method(:fake_clstream))
end end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
it 'sends a response and closes the stream if there no errors' do it 'sends a response and closes the stream if there no errors' do
req = Object.new
expect(@call).to receive(:remote_send).once.with(@ok_response) expect(@call).to receive(:remote_send).once.with(@ok_response)
expect(@call).to receive(:send_status).once.with(OK, 'OK') expect(@call).to receive(:send_status).once.with(OK, 'OK')
expect(@call).to receive(:finished).once expect(@call).to receive(:finished).once
@client_streamer.run_server_method(@call, method(:fake_clstream)) @client_streamer.run_server_method(@call, method(:fake_clstream))
end end
end end
describe 'for server streaming' do describe 'for server streaming' do
@ -167,7 +161,7 @@ describe GRPC::RpcDesc do
it 'absorbs EventError with no further action' do it 'absorbs EventError with no further action' do
expect(@call).to receive(:remote_read).once.and_raise(EventError) expect(@call).to receive(:remote_read).once.and_raise(EventError)
blk = Proc.new do blk = proc do
@server_streamer.run_server_method(@call, method(:fake_svstream)) @server_streamer.run_server_method(@call, method(:fake_svstream))
end end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
@ -175,7 +169,7 @@ describe GRPC::RpcDesc do
it 'absorbs CallError with no further action' do it 'absorbs CallError with no further action' do
expect(@call).to receive(:remote_read).once.and_raise(CallError) expect(@call).to receive(:remote_read).once.and_raise(CallError)
blk = Proc.new do blk = proc do
@server_streamer.run_server_method(@call, method(:fake_svstream)) @server_streamer.run_server_method(@call, method(:fake_svstream))
end end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
@ -189,7 +183,6 @@ describe GRPC::RpcDesc do
expect(@call).to receive(:finished).once expect(@call).to receive(:finished).once
@server_streamer.run_server_method(@call, method(:fake_svstream)) @server_streamer.run_server_method(@call, method(:fake_svstream))
end end
end end
describe 'for bidi streamers' do describe 'for bidi streamers' do
@ -215,30 +208,27 @@ describe GRPC::RpcDesc do
end end
it 'closes the stream if there no errors' do it 'closes the stream if there no errors' do
req = Object.new
expect(@call).to receive(:run_server_bidi) expect(@call).to receive(:run_server_bidi)
expect(@call).to receive(:send_status).once.with(OK, 'OK') expect(@call).to receive(:send_status).once.with(OK, 'OK')
expect(@call).to receive(:finished).once expect(@call).to receive(:finished).once
@bidi_streamer.run_server_method(@call, method(:fake_bidistream)) @bidi_streamer.run_server_method(@call, method(:fake_bidistream))
end end
end end
end end
describe '#assert_arity_matches' do describe '#assert_arity_matches' do
def no_arg def no_arg
end end
def fake_clstream(arg) def fake_clstream(_arg)
end end
def fake_svstream(arg1, arg2) def fake_svstream(_arg1, _arg2)
end end
it 'raises when a request_response does not have 2 args' do it 'raises when a request_response does not have 2 args' do
[:fake_clstream, :no_arg].each do |mth| [:fake_clstream, :no_arg].each do |mth|
blk = Proc.new do blk = proc do
@request_response.assert_arity_matches(method(mth)) @request_response.assert_arity_matches(method(mth))
end end
expect(&blk).to raise_error expect(&blk).to raise_error
@ -246,7 +236,7 @@ describe GRPC::RpcDesc do
end end
it 'passes when a request_response has 2 args' do it 'passes when a request_response has 2 args' do
blk = Proc.new do blk = proc do
@request_response.assert_arity_matches(method(:fake_svstream)) @request_response.assert_arity_matches(method(:fake_svstream))
end end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
@ -254,7 +244,7 @@ describe GRPC::RpcDesc do
it 'raises when a server_streamer does not have 2 args' do it 'raises when a server_streamer does not have 2 args' do
[:fake_clstream, :no_arg].each do |mth| [:fake_clstream, :no_arg].each do |mth|
blk = Proc.new do blk = proc do
@server_streamer.assert_arity_matches(method(mth)) @server_streamer.assert_arity_matches(method(mth))
end end
expect(&blk).to raise_error expect(&blk).to raise_error
@ -262,7 +252,7 @@ describe GRPC::RpcDesc do
end end
it 'passes when a server_streamer has 2 args' do it 'passes when a server_streamer has 2 args' do
blk = Proc.new do blk = proc do
@server_streamer.assert_arity_matches(method(:fake_svstream)) @server_streamer.assert_arity_matches(method(:fake_svstream))
end end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
@ -270,7 +260,7 @@ describe GRPC::RpcDesc do
it 'raises when a client streamer does not have 1 arg' do it 'raises when a client streamer does not have 1 arg' do
[:fake_svstream, :no_arg].each do |mth| [:fake_svstream, :no_arg].each do |mth|
blk = Proc.new do blk = proc do
@client_streamer.assert_arity_matches(method(mth)) @client_streamer.assert_arity_matches(method(mth))
end end
expect(&blk).to raise_error expect(&blk).to raise_error
@ -278,16 +268,15 @@ describe GRPC::RpcDesc do
end end
it 'passes when a client_streamer has 1 arg' do it 'passes when a client_streamer has 1 arg' do
blk = Proc.new do blk = proc do
@client_streamer.assert_arity_matches(method(:fake_clstream)) @client_streamer.assert_arity_matches(method(:fake_clstream))
end end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
it 'raises when a bidi streamer does not have 1 arg' do it 'raises when a bidi streamer does not have 1 arg' do
[:fake_svstream, :no_arg].each do |mth| [:fake_svstream, :no_arg].each do |mth|
blk = Proc.new do blk = proc do
@bidi_streamer.assert_arity_matches(method(mth)) @bidi_streamer.assert_arity_matches(method(mth))
end end
expect(&blk).to raise_error expect(&blk).to raise_error
@ -295,88 +284,78 @@ describe GRPC::RpcDesc do
end end
it 'passes when a bidi streamer has 1 arg' do it 'passes when a bidi streamer has 1 arg' do
blk = Proc.new do blk = proc do
@bidi_streamer.assert_arity_matches(method(:fake_clstream)) @bidi_streamer.assert_arity_matches(method(:fake_clstream))
end end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
end end
describe '#is_request_response?' do describe '#request_response?' do
it 'is true only input and output are both not Streams' do it 'is true only input and output are both not Streams' do
expect(@request_response.is_request_response?).to be(true) expect(@request_response.request_response?).to be(true)
expect(@client_streamer.is_request_response?).to be(false) expect(@client_streamer.request_response?).to be(false)
expect(@bidi_streamer.is_request_response?).to be(false) expect(@bidi_streamer.request_response?).to be(false)
expect(@server_streamer.is_request_response?).to be(false) expect(@server_streamer.request_response?).to be(false)
end end
end end
describe '#is_client_streamer?' do describe '#client_streamer?' do
it 'is true only when input is a Stream and output is not a Stream' do it 'is true only when input is a Stream and output is not a Stream' do
expect(@client_streamer.is_client_streamer?).to be(true) expect(@client_streamer.client_streamer?).to be(true)
expect(@request_response.is_client_streamer?).to be(false) expect(@request_response.client_streamer?).to be(false)
expect(@server_streamer.is_client_streamer?).to be(false) expect(@server_streamer.client_streamer?).to be(false)
expect(@bidi_streamer.is_client_streamer?).to be(false) expect(@bidi_streamer.client_streamer?).to be(false)
end end
end end
describe '#is_server_streamer?' do describe '#server_streamer?' do
it 'is true only when output is a Stream and input is not a Stream' do it 'is true only when output is a Stream and input is not a Stream' do
expect(@server_streamer.is_server_streamer?).to be(true) expect(@server_streamer.server_streamer?).to be(true)
expect(@client_streamer.is_server_streamer?).to be(false) expect(@client_streamer.server_streamer?).to be(false)
expect(@request_response.is_server_streamer?).to be(false) expect(@request_response.server_streamer?).to be(false)
expect(@bidi_streamer.is_server_streamer?).to be(false) expect(@bidi_streamer.server_streamer?).to be(false)
end end
end end
describe '#is_bidi_streamer?' do describe '#bidi_streamer?' do
it 'is true only when output is a Stream and input is a Stream' do it 'is true only when output is a Stream and input is a Stream' do
expect(@bidi_streamer.is_bidi_streamer?).to be(true) expect(@bidi_streamer.bidi_streamer?).to be(true)
expect(@server_streamer.is_bidi_streamer?).to be(false) expect(@server_streamer.bidi_streamer?).to be(false)
expect(@client_streamer.is_bidi_streamer?).to be(false) expect(@client_streamer.bidi_streamer?).to be(false)
expect(@request_response.is_bidi_streamer?).to be(false) expect(@request_response.bidi_streamer?).to be(false)
end end
end end
def fake_reqresp(req, call) def fake_reqresp(_req, _call)
@ok_response @ok_response
end end
def fake_clstream(call) def fake_clstream(_call)
@ok_response @ok_response
end end
def fake_svstream(req, call) def fake_svstream(_req, _call)
[@ok_response, @ok_response] [@ok_response, @ok_response]
end end
def fake_bidistream(an_array) def fake_bidistream(an_array)
return an_array an_array
end end
def bad_status(req, call) def bad_status(_req, _call)
raise GRPC::BadStatus.new(@bs_code, 'NOK') fail GRPC::BadStatus.new(@bs_code, 'NOK')
end end
def other_error(req, call) def other_error(_req, _call)
raise ArgumentError.new('other error') fail(ArgumentError, 'other error')
end end
def bad_status_alt(call) def bad_status_alt(_call)
raise GRPC::BadStatus.new(@bs_code, 'NOK') fail GRPC::BadStatus.new(@bs_code, 'NOK')
end end
def other_error_alt(call) def other_error_alt(_call)
raise ArgumentError.new('other error') fail(ArgumentError, 'other error')
end end
end end

@ -33,9 +33,7 @@ require 'xray/thread_dump_signal_handler'
Pool = GRPC::RpcServer::Pool Pool = GRPC::RpcServer::Pool
describe Pool do describe Pool do
describe '#new' do describe '#new' do
it 'raises if a non-positive size is used' do it 'raises if a non-positive size is used' do
expect { Pool.new(0) }.to raise_error expect { Pool.new(0) }.to raise_error
expect { Pool.new(-1) }.to raise_error expect { Pool.new(-1) }.to raise_error
@ -45,11 +43,9 @@ describe Pool do
it 'is constructed OK with a positive size' do it 'is constructed OK with a positive size' do
expect { Pool.new(1) }.not_to raise_error expect { Pool.new(1) }.not_to raise_error
end end
end end
describe '#jobs_waiting' do describe '#jobs_waiting' do
it 'at start, it is zero' do it 'at start, it is zero' do
p = Pool.new(1) p = Pool.new(1)
expect(p.jobs_waiting).to be(0) expect(p.jobs_waiting).to be(0)
@ -57,74 +53,67 @@ describe Pool do
it 'it increases, with each scheduled job if the pool is not running' do it 'it increases, with each scheduled job if the pool is not running' do
p = Pool.new(1) p = Pool.new(1)
job = Proc.new { } job = proc {}
expect(p.jobs_waiting).to be(0) expect(p.jobs_waiting).to be(0)
5.times do |i| 5.times do |i|
p.schedule(&job) p.schedule(&job)
expect(p.jobs_waiting).to be(i + 1) expect(p.jobs_waiting).to be(i + 1)
end end
end end
it 'it decreases as jobs are run' do it 'it decreases as jobs are run' do
p = Pool.new(1) p = Pool.new(1)
job = Proc.new { } job = proc {}
expect(p.jobs_waiting).to be(0) expect(p.jobs_waiting).to be(0)
3.times do |i| 3.times do
p.schedule(&job) p.schedule(&job)
end end
p.start p.start
sleep 2 sleep 2
expect(p.jobs_waiting).to be(0) expect(p.jobs_waiting).to be(0)
end end
end end
describe '#schedule' do describe '#schedule' do
it 'throws if the pool is already stopped' do it 'throws if the pool is already stopped' do
p = Pool.new(1) p = Pool.new(1)
p.stop() p.stop
job = Proc.new { } job = proc {}
expect { p.schedule(&job) }.to raise_error expect { p.schedule(&job) }.to raise_error
end end
it 'adds jobs that get run by the pool' do it 'adds jobs that get run by the pool' do
p = Pool.new(1) p = Pool.new(1)
p.start() p.start
o, q = Object.new, Queue.new o, q = Object.new, Queue.new
job = Proc.new { q.push(o) } job = proc { q.push(o) }
p.schedule(&job) p.schedule(&job)
expect(q.pop).to be(o) expect(q.pop).to be(o)
p.stop p.stop
end end
end end
describe '#stop' do describe '#stop' do
it 'works when there are no scheduled tasks' do it 'works when there are no scheduled tasks' do
p = Pool.new(1) p = Pool.new(1)
expect { p.stop() }.not_to raise_error expect { p.stop }.not_to raise_error
end end
it 'stops jobs when there are long running jobs' do it 'stops jobs when there are long running jobs' do
p = Pool.new(1) p = Pool.new(1)
p.start() p.start
o, q = Object.new, Queue.new o, q = Object.new, Queue.new
job = Proc.new do job = proc do
sleep(5) # long running sleep(5) # long running
q.push(o) q.push(o)
end end
p.schedule(&job) p.schedule(&job)
sleep(1) # should ensure the long job gets scheduled sleep(1) # should ensure the long job gets scheduled
expect { p.stop() }.not_to raise_error expect { p.stop }.not_to raise_error
end end
end end
describe '#start' do describe '#start' do
it 'runs pre-scheduled jobs' do it 'runs pre-scheduled jobs' do
p = Pool.new(2) p = Pool.new(2)
o, q = Object.new, Queue.new o, q = Object.new, Queue.new
@ -146,7 +135,5 @@ describe Pool do
end end
p.stop p.stop
end end
end end
end end

@ -37,33 +37,37 @@ def load_test_certs
files.map { |f| File.open(File.join(test_root, f)).read } files.map { |f| File.open(File.join(test_root, f)).read }
end end
# A test message
class EchoMsg class EchoMsg
def self.marshal(o) def self.marshal(_o)
'' ''
end end
def self.unmarshal(o) def self.unmarshal(_o)
EchoMsg.new EchoMsg.new
end end
end end
# A test service with no methods.
class EmptyService class EmptyService
include GRPC::GenericService include GRPC::GenericService
end end
# A test service without an implementation.
class NoRpcImplementation class NoRpcImplementation
include GRPC::GenericService include GRPC::GenericService
rpc :an_rpc, EchoMsg, EchoMsg rpc :an_rpc, EchoMsg, EchoMsg
end end
# A test service with an implementation.
class EchoService class EchoService
include GRPC::GenericService include GRPC::GenericService
rpc :an_rpc, EchoMsg, EchoMsg rpc :an_rpc, EchoMsg, EchoMsg
def initialize(default_var='ignored') def initialize(_default_var = 'ignored')
end end
def an_rpc(req, call) def an_rpc(req, _call)
logger.info('echo service received a request') logger.info('echo service received a request')
req req
end end
@ -71,14 +75,15 @@ end
EchoStub = EchoService.rpc_stub_class EchoStub = EchoService.rpc_stub_class
# A slow test service.
class SlowService class SlowService
include GRPC::GenericService include GRPC::GenericService
rpc :an_rpc, EchoMsg, EchoMsg rpc :an_rpc, EchoMsg, EchoMsg
def initialize(default_var='ignored') def initialize(_default_var = 'ignored')
end end
def an_rpc(req, call) def an_rpc(req, _call)
delay = 0.25 delay = 0.25
logger.info("starting a slow #{delay} rpc") logger.info("starting a slow #{delay} rpc")
sleep delay sleep delay
@ -89,7 +94,6 @@ end
SlowStub = SlowService.rpc_stub_class SlowStub = SlowService.rpc_stub_class
describe GRPC::RpcServer do describe GRPC::RpcServer do
RpcServer = GRPC::RpcServer RpcServer = GRPC::RpcServer
StatusCodes = GRPC::Core::StatusCodes StatusCodes = GRPC::Core::StatusCodes
@ -97,7 +101,7 @@ describe GRPC::RpcServer do
@method = 'an_rpc_method' @method = 'an_rpc_method'
@pass = 0 @pass = 0
@fail = 1 @fail = 1
@noop = Proc.new { |x| x } @noop = proc { |x| x }
@server_queue = GRPC::Core::CompletionQueue.new @server_queue = GRPC::Core::CompletionQueue.new
port = find_unused_tcp_port port = find_unused_tcp_port
@ -112,18 +116,17 @@ describe GRPC::RpcServer do
end end
describe '#new' do describe '#new' do
it 'can be created with just some args' do it 'can be created with just some args' do
opts = {:a_channel_arg => 'an_arg'} opts = { a_channel_arg: 'an_arg' }
blk = Proc.new do blk = proc do
RpcServer.new(**opts) RpcServer.new(**opts)
end end
expect(&blk).not_to raise_error expect(&blk).not_to raise_error
end end
it 'can be created with a default deadline' do it 'can be created with a default deadline' do
opts = {:a_channel_arg => 'an_arg', :deadline => 5} opts = { a_channel_arg: 'an_arg', deadline: 5 }
blk = Proc.new do blk = proc do
RpcServer.new(**opts) RpcServer.new(**opts)
end end
expect(&blk).not_to raise_error expect(&blk).not_to raise_error
@ -131,20 +134,20 @@ describe GRPC::RpcServer do
it 'can be created with a completion queue override' do it 'can be created with a completion queue override' do
opts = { opts = {
:a_channel_arg => 'an_arg', a_channel_arg: 'an_arg',
:completion_queue_override => @server_queue completion_queue_override: @server_queue
} }
blk = Proc.new do blk = proc do
RpcServer.new(**opts) RpcServer.new(**opts)
end end
expect(&blk).not_to raise_error expect(&blk).not_to raise_error
end end
it 'cannot be created with a bad completion queue override' do it 'cannot be created with a bad completion queue override' do
blk = Proc.new do blk = proc do
opts = { opts = {
:a_channel_arg => 'an_arg', a_channel_arg: 'an_arg',
:completion_queue_override => Object.new completion_queue_override: Object.new
} }
RpcServer.new(**opts) RpcServer.new(**opts)
end end
@ -152,10 +155,10 @@ describe GRPC::RpcServer do
end end
it 'cannot be created with invalid ServerCredentials' do it 'cannot be created with invalid ServerCredentials' do
blk = Proc.new do blk = proc do
opts = { opts = {
:a_channel_arg => 'an_arg', a_channel_arg: 'an_arg',
:creds => Object.new creds: Object.new
} }
RpcServer.new(**opts) RpcServer.new(**opts)
end end
@ -165,10 +168,10 @@ describe GRPC::RpcServer do
it 'can be created with the creds as valid ServerCedentials' do it 'can be created with the creds as valid ServerCedentials' do
certs = load_test_certs certs = load_test_certs
server_creds = GRPC::Core::ServerCredentials.new(nil, certs[1], certs[2]) server_creds = GRPC::Core::ServerCredentials.new(nil, certs[1], certs[2])
blk = Proc.new do blk = proc do
opts = { opts = {
:a_channel_arg => 'an_arg', a_channel_arg: 'an_arg',
:creds => server_creds creds: server_creds
} }
RpcServer.new(**opts) RpcServer.new(**opts)
end end
@ -176,30 +179,28 @@ describe GRPC::RpcServer do
end end
it 'can be created with a server override' do it 'can be created with a server override' do
opts = {:a_channel_arg => 'an_arg', :server_override => @server} opts = { a_channel_arg: 'an_arg', server_override: @server }
blk = Proc.new do blk = proc do
RpcServer.new(**opts) RpcServer.new(**opts)
end end
expect(&blk).not_to raise_error expect(&blk).not_to raise_error
end end
it 'cannot be created with a bad server override' do it 'cannot be created with a bad server override' do
blk = Proc.new do blk = proc do
opts = { opts = {
:a_channel_arg => 'an_arg', a_channel_arg: 'an_arg',
:server_override => Object.new server_override: Object.new
} }
RpcServer.new(**opts) RpcServer.new(**opts)
end end
expect(&blk).to raise_error expect(&blk).to raise_error
end end
end end
describe '#stopped?' do describe '#stopped?' do
before(:each) do before(:each) do
opts = {:a_channel_arg => 'an_arg', :poll_period => 1} opts = { a_channel_arg: 'an_arg', poll_period: 1 }
@srv = RpcServer.new(**opts) @srv = RpcServer.new(**opts)
end end
@ -229,33 +230,31 @@ describe GRPC::RpcServer do
expect(@srv.stopped?).to be(true) expect(@srv.stopped?).to be(true)
t.join t.join
end end
end end
describe '#running?' do describe '#running?' do
it 'starts out false' do it 'starts out false' do
opts = {:a_channel_arg => 'an_arg', :server_override => @server} opts = { a_channel_arg: 'an_arg', server_override: @server }
r = RpcServer.new(**opts) r = RpcServer.new(**opts)
expect(r.running?).to be(false) expect(r.running?).to be(false)
end end
it 'is false after run is called with no services registered' do it 'is false after run is called with no services registered' do
opts = { opts = {
:a_channel_arg => 'an_arg', a_channel_arg: 'an_arg',
:poll_period => 1, poll_period: 1,
:server_override => @server server_override: @server
} }
r = RpcServer.new(**opts) r = RpcServer.new(**opts)
r.run() r.run
expect(r.running?).to be(false) expect(r.running?).to be(false)
end end
it 'is true after run is called with a registered service' do it 'is true after run is called with a registered service' do
opts = { opts = {
:a_channel_arg => 'an_arg', a_channel_arg: 'an_arg',
:poll_period => 1, poll_period: 1,
:server_override => @server server_override: @server
} }
r = RpcServer.new(**opts) r = RpcServer.new(**opts)
r.handle(EchoService) r.handle(EchoService)
@ -265,13 +264,11 @@ describe GRPC::RpcServer do
r.stop r.stop
t.join t.join
end end
end end
describe '#handle' do describe '#handle' do
before(:each) do before(:each) do
@opts = {:a_channel_arg => 'an_arg', :poll_period => 1} @opts = { a_channel_arg: 'an_arg', poll_period: 1 }
@srv = RpcServer.new(**@opts) @srv = RpcServer.new(**@opts)
end end
@ -309,33 +306,30 @@ describe GRPC::RpcServer do
@srv.handle(EchoService) @srv.handle(EchoService)
expect { r.handle(EchoService) }.to raise_error expect { r.handle(EchoService) }.to raise_error
end end
end end
describe '#run' do describe '#run' do
before(:each) do before(:each) do
@client_opts = { @client_opts = {
:channel_override => @ch channel_override: @ch
} }
@marshal = EchoService.rpc_descs[:an_rpc].marshal_proc @marshal = EchoService.rpc_descs[:an_rpc].marshal_proc
@unmarshal = EchoService.rpc_descs[:an_rpc].unmarshal_proc(:output) @unmarshal = EchoService.rpc_descs[:an_rpc].unmarshal_proc(:output)
server_opts = { server_opts = {
:server_override => @server, server_override: @server,
:completion_queue_override => @server_queue, completion_queue_override: @server_queue,
:poll_period => 1 poll_period: 1
} }
@srv = RpcServer.new(**server_opts) @srv = RpcServer.new(**server_opts)
end end
describe 'when running' do describe 'when running' do
it 'should return NOT_FOUND status for requests on unknown methods' do it 'should return NOT_FOUND status for requests on unknown methods' do
@srv.handle(EchoService) @srv.handle(EchoService)
t = Thread.new { @srv.run } t = Thread.new { @srv.run }
@srv.wait_till_running @srv.wait_till_running
req = EchoMsg.new req = EchoMsg.new
blk = Proc.new do blk = proc do
cq = GRPC::Core::CompletionQueue.new cq = GRPC::Core::CompletionQueue.new
stub = GRPC::ClientStub.new(@host, cq, **@client_opts) stub = GRPC::ClientStub.new(@host, cq, **@client_opts)
stub.request_response('/unknown', req, @marshal, @unmarshal) stub.request_response('/unknown', req, @marshal, @unmarshal)
@ -352,20 +346,19 @@ describe GRPC::RpcServer do
req = EchoMsg.new req = EchoMsg.new
n = 5 # arbitrary n = 5 # arbitrary
stub = EchoStub.new(@host, **@client_opts) stub = EchoStub.new(@host, **@client_opts)
n.times { |x| expect(stub.an_rpc(req)).to be_a(EchoMsg) } n.times { expect(stub.an_rpc(req)).to be_a(EchoMsg) }
@srv.stop @srv.stop
t.join t.join
end end
it 'should obtain responses for multiple parallel requests' do it 'should obtain responses for multiple parallel requests' do
@srv.handle(EchoService) @srv.handle(EchoService)
t = Thread.new { @srv.run } Thread.new { @srv.run }
@srv.wait_till_running @srv.wait_till_running
req, q = EchoMsg.new, Queue.new req, q = EchoMsg.new, Queue.new
n = 5 # arbitrary n = 5 # arbitrary
threads = [] threads = []
n.times do |x| n.times do
cq = GRPC::Core::CompletionQueue.new
threads << Thread.new do threads << Thread.new do
stub = EchoStub.new(@host, **@client_opts) stub = EchoStub.new(@host, **@client_opts)
q << stub.an_rpc(req) q << stub.an_rpc(req)
@ -373,44 +366,40 @@ describe GRPC::RpcServer do
end end
n.times { expect(q.pop).to be_a(EchoMsg) } n.times { expect(q.pop).to be_a(EchoMsg) }
@srv.stop @srv.stop
threads.each { |t| t.join } threads.each(&:join)
end end
it 'should return UNAVAILABLE status if there too many jobs' do it 'should return UNAVAILABLE status if there too many jobs' do
opts = { opts = {
:a_channel_arg => 'an_arg', a_channel_arg: 'an_arg',
:server_override => @server, server_override: @server,
:completion_queue_override => @server_queue, completion_queue_override: @server_queue,
:pool_size => 1, pool_size: 1,
:poll_period => 1, poll_period: 1,
:max_waiting_requests => 0 max_waiting_requests: 0
} }
alt_srv = RpcServer.new(**opts) alt_srv = RpcServer.new(**opts)
alt_srv.handle(SlowService) alt_srv.handle(SlowService)
t = Thread.new { alt_srv.run } Thread.new { alt_srv.run }
alt_srv.wait_till_running alt_srv.wait_till_running
req = EchoMsg.new req = EchoMsg.new
n = 5 # arbitrary, use as many to ensure the server pool is exceeded n = 5 # arbitrary, use as many to ensure the server pool is exceeded
threads = [] threads = []
_1_failed_as_unavailable = false one_failed_as_unavailable = false
n.times do |x| n.times do
threads << Thread.new do threads << Thread.new do
cq = GRPC::Core::CompletionQueue.new
stub = SlowStub.new(@host, **@client_opts) stub = SlowStub.new(@host, **@client_opts)
begin begin
stub.an_rpc(req) stub.an_rpc(req)
rescue GRPC::BadStatus => e rescue GRPC::BadStatus => e
_1_failed_as_unavailable = e.code == StatusCodes::UNAVAILABLE one_failed_as_unavailable = e.code == StatusCodes::UNAVAILABLE
end end
end end
end end
threads.each { |t| t.join } threads.each(&:join)
alt_srv.stop alt_srv.stop
expect(_1_failed_as_unavailable).to be(true) expect(one_failed_as_unavailable).to be(true)
end end
end end
end end
end end

@ -31,23 +31,24 @@ require 'grpc'
require 'grpc/generic/rpc_desc' require 'grpc/generic/rpc_desc'
require 'grpc/generic/service' require 'grpc/generic/service'
# A test message that encodes/decodes using marshal/marshal.
class GoodMsg class GoodMsg
def self.marshal(o) def self.marshal(_o)
'' ''
end end
def self.unmarshal(o) def self.unmarshal(_o)
GoodMsg.new GoodMsg.new
end end
end end
# A test message that encodes/decodes using encode/decode.
class EncodeDecodeMsg class EncodeDecodeMsg
def self.encode(o) def self.encode(_o)
'' ''
end end
def self.decode(o) def self.decode(_o)
GoodMsg.new GoodMsg.new
end end
end end
@ -55,7 +56,6 @@ end
GenericService = GRPC::GenericService GenericService = GRPC::GenericService
Dsl = GenericService::Dsl Dsl = GenericService::Dsl
describe 'String#underscore' do describe 'String#underscore' do
it 'should convert CamelCase to underscore separated' do it 'should convert CamelCase to underscore separated' do
expect('AnRPC'.underscore).to eq('an_rpc') expect('AnRPC'.underscore).to eq('an_rpc')
@ -66,20 +66,14 @@ describe 'String#underscore' do
end end
describe Dsl do describe Dsl do
it 'can be included in new classes' do it 'can be included in new classes' do
blk = Proc.new do blk = proc { Class.new { include Dsl } }
c = Class.new { include Dsl }
end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
end end
describe GenericService do describe GenericService do
describe 'including it' do describe 'including it' do
it 'adds a class method, rpc' do it 'adds a class method, rpc' do
c = Class.new do c = Class.new do
include GenericService include GenericService
@ -144,9 +138,8 @@ describe GenericService do
end end
describe '#include' do describe '#include' do
it 'raises if #rpc is missing an arg' do it 'raises if #rpc is missing an arg' do
blk = Proc.new do blk = proc do
Class.new do Class.new do
include GenericService include GenericService
rpc :AnRpc, GoodMsg rpc :AnRpc, GoodMsg
@ -154,7 +147,7 @@ describe GenericService do
end end
expect(&blk).to raise_error ArgumentError expect(&blk).to raise_error ArgumentError
blk = Proc.new do blk = proc do
Class.new do Class.new do
include GenericService include GenericService
rpc :AnRpc rpc :AnRpc
@ -164,9 +157,8 @@ describe GenericService do
end end
describe 'when #rpc args are incorrect' do describe 'when #rpc args are incorrect' do
it 'raises if an arg does not have the marshal or unmarshal methods' do it 'raises if an arg does not have the marshal or unmarshal methods' do
blk = Proc.new do blk = proc do
Class.new do Class.new do
include GenericService include GenericService
rpc :AnRpc, GoodMsg, Object rpc :AnRpc, GoodMsg, Object
@ -176,13 +168,14 @@ describe GenericService do
end end
it 'raises if a type arg only has the marshal method' do it 'raises if a type arg only has the marshal method' do
# a bad message type with only a marshal method
class OnlyMarshal class OnlyMarshal
def marshal(o) def marshal(o)
o o
end end
end end
blk = Proc.new do blk = proc do
Class.new do Class.new do
include GenericService include GenericService
rpc :AnRpc, OnlyMarshal, GoodMsg rpc :AnRpc, OnlyMarshal, GoodMsg
@ -192,12 +185,13 @@ describe GenericService do
end end
it 'raises if a type arg only has the unmarshal method' do it 'raises if a type arg only has the unmarshal method' do
# a bad message type with only an unmarshal method
class OnlyUnmarshal class OnlyUnmarshal
def self.ummarshal(o) def self.ummarshal(o)
o o
end end
end end
blk = Proc.new do blk = proc do
Class.new do Class.new do
include GenericService include GenericService
rpc :AnRpc, GoodMsg, OnlyUnmarshal rpc :AnRpc, GoodMsg, OnlyUnmarshal
@ -208,7 +202,7 @@ describe GenericService do
end end
it 'is ok for services that expect the default {un,}marshal methods' do it 'is ok for services that expect the default {un,}marshal methods' do
blk = Proc.new do blk = proc do
Class.new do Class.new do
include GenericService include GenericService
rpc :AnRpc, GoodMsg, GoodMsg rpc :AnRpc, GoodMsg, GoodMsg
@ -218,7 +212,7 @@ describe GenericService do
end end
it 'is ok for services that override the default {un,}marshal methods' do it 'is ok for services that override the default {un,}marshal methods' do
blk = Proc.new do blk = proc do
Class.new do Class.new do
include GenericService include GenericService
self.marshal_class_method = :encode self.marshal_class_method = :encode
@ -228,11 +222,9 @@ describe GenericService do
end end
expect(&blk).not_to raise_error expect(&blk).not_to raise_error
end end
end end
describe '#rpc_stub_class' do describe '#rpc_stub_class' do
it 'generates a client class that defines any of the rpc methods' do it 'generates a client class that defines any of the rpc methods' do
s = Class.new do s = Class.new do
include GenericService include GenericService
@ -249,7 +241,6 @@ describe GenericService do
end end
describe 'the generated instances' do describe 'the generated instances' do
it 'can be instanciated with just a hostname' do it 'can be instanciated with just a hostname' do
s = Class.new do s = Class.new do
include GenericService include GenericService
@ -277,13 +268,10 @@ describe GenericService do
expect(o.methods).to include(:a_client_streamer) expect(o.methods).to include(:a_client_streamer)
expect(o.methods).to include(:a_bidi_streamer) expect(o.methods).to include(:a_bidi_streamer)
end end
end end
end end
describe '#assert_rpc_descs_have_methods' do describe '#assert_rpc_descs_have_methods' do
it 'fails if there is no instance method for an rpc descriptor' do it 'fails if there is no instance method for an rpc descriptor' do
c1 = Class.new do c1 = Class.new do
include GenericService include GenericService
@ -310,16 +298,16 @@ describe GenericService do
rpc :AClientStreamer, stream(GoodMsg), GoodMsg rpc :AClientStreamer, stream(GoodMsg), GoodMsg
rpc :ABidiStreamer, stream(GoodMsg), stream(GoodMsg) rpc :ABidiStreamer, stream(GoodMsg), stream(GoodMsg)
def an_rpc(req, call) def an_rpc(_req, _call)
end end
def a_server_streamer(req, call) def a_server_streamer(_req, _call)
end end
def a_client_streamer(call) def a_client_streamer(_call)
end end
def a_bidi_streamer(call) def a_bidi_streamer(_call)
end end
end end
expect { c.assert_rpc_descs_have_methods }.to_not raise_error expect { c.assert_rpc_descs_have_methods }.to_not raise_error
@ -330,7 +318,7 @@ describe GenericService do
include GenericService include GenericService
rpc :AnRpc, GoodMsg, GoodMsg rpc :AnRpc, GoodMsg, GoodMsg
def an_rpc(req, call) def an_rpc(_req, _call)
end end
end end
c = Class.new(base) c = Class.new(base)
@ -344,13 +332,11 @@ describe GenericService do
rpc :AnRpc, GoodMsg, GoodMsg rpc :AnRpc, GoodMsg, GoodMsg
end end
c = Class.new(base) do c = Class.new(base) do
def an_rpc(req, call) def an_rpc(_req, _call)
end end
end end
expect { c.assert_rpc_descs_have_methods }.to_not raise_error expect { c.assert_rpc_descs_have_methods }.to_not raise_error
expect(c.include?(GenericService)).to be(true) expect(c.include?(GenericService)).to be(true)
end end
end end
end end

@ -30,7 +30,6 @@
require 'grpc' require 'grpc'
describe GRPC::Core::Metadata do describe GRPC::Core::Metadata do
describe '#new' do describe '#new' do
it 'should create instances' do it 'should create instances' do
expect { GRPC::Core::Metadata.new('a key', 'a value') }.to_not raise_error expect { GRPC::Core::Metadata.new('a key', 'a value') }.to_not raise_error
@ -62,5 +61,4 @@ describe GRPC::Core::Metadata do
expect(md.dup.value).to eq('a value') expect(md.dup.value).to eq('a value')
end end
end end
end end

@ -32,7 +32,7 @@ require 'socket'
# @param [Fixnum] the minimum port number to accept # @param [Fixnum] the minimum port number to accept
# @param [Fixnum] the maximum port number to accept # @param [Fixnum] the maximum port number to accept
# @return [Fixnum ]a free tcp port # @return [Fixnum ]a free tcp port
def find_unused_tcp_port(min=32768, max=60000) def find_unused_tcp_port(min = 32_768, max = 60_000)
# Allow the system to assign a port, by specifying 0. # Allow the system to assign a port, by specifying 0.
# Loop until a port is assigned in the required range # Loop until a port is assigned in the required range
loop do loop do
@ -40,6 +40,6 @@ def find_unused_tcp_port(min=32768, max=60000)
socket.bind(Addrinfo.tcp('127.0.0.1', 0)) socket.bind(Addrinfo.tcp('127.0.0.1', 0))
p = socket.local_address.ip_port p = socket.local_address.ip_port
socket.close socket.close
return p if p > min and p < 60000 return p if p > min && p < max
end end
end end

@ -35,13 +35,10 @@ def load_test_certs
files.map { |f| File.open(File.join(test_root, f)).read } files.map { |f| File.open(File.join(test_root, f)).read }
end end
describe GRPC::Core::ServerCredentials do describe GRPC::Core::ServerCredentials do
Creds = GRPC::Core::ServerCredentials Creds = GRPC::Core::ServerCredentials
describe '#new' do describe '#new' do
it 'can be constructed from a fake CA PEM, server PEM and a server key' do it 'can be constructed from a fake CA PEM, server PEM and a server key' do
expect { Creds.new('a', 'b', 'c') }.not_to raise_error expect { Creds.new('a', 'b', 'c') }.not_to raise_error
end end
@ -53,22 +50,20 @@ describe GRPC::Core::ServerCredentials do
it 'cannot be constructed without a server cert chain' do it 'cannot be constructed without a server cert chain' do
root_cert, server_key, _ = load_test_certs root_cert, server_key, _ = load_test_certs
blk = Proc.new { Creds.new(root_cert, server_key, nil) } blk = proc { Creds.new(root_cert, server_key, nil) }
expect(&blk).to raise_error expect(&blk).to raise_error
end end
it 'cannot be constructed without a server key' do it 'cannot be constructed without a server key' do
root_cert, server_key, _ = load_test_certs root_cert, _, _ = load_test_certs
blk = Proc.new { Creds.new(root_cert, _, cert_chain) } blk = proc { Creds.new(root_cert, nil, cert_chain) }
expect(&blk).to raise_error expect(&blk).to raise_error
end end
it 'can be constructed without a root_cret' do it 'can be constructed without a root_cret' do
_, server_key, cert_chain = load_test_certs _, server_key, cert_chain = load_test_certs
blk = Proc.new { Creds.new(_, server_key, cert_chain) } blk = proc { Creds.new(nil, server_key, cert_chain) }
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
end end
end end

@ -39,7 +39,6 @@ end
Server = GRPC::Core::Server Server = GRPC::Core::Server
describe Server do describe Server do
def create_test_cert def create_test_cert
GRPC::Core::ServerCredentials.new(*load_test_certs) GRPC::Core::ServerCredentials.new(*load_test_certs)
end end
@ -49,11 +48,8 @@ describe Server do
end end
describe '#start' do describe '#start' do
it 'runs without failing' do it 'runs without failing' do
blk = Proc.new do blk = proc { Server.new(@cq, nil).start }
s = Server.new(@cq, nil).start
end
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
@ -62,20 +58,19 @@ describe Server do
s.close s.close
expect { s.start }.to raise_error(RuntimeError) expect { s.start }.to raise_error(RuntimeError)
end end
end end
describe '#destroy' do describe '#destroy' do
it 'destroys a server ok' do it 'destroys a server ok' do
s = start_a_server s = start_a_server
blk = Proc.new { s.destroy } blk = proc { s.destroy }
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
it 'can be called more than once without error' do it 'can be called more than once without error' do
s = start_a_server s = start_a_server
begin begin
blk = Proc.new { s.destroy } blk = proc { s.destroy }
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
blk.call blk.call
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
@ -89,7 +84,7 @@ describe Server do
it 'closes a server ok' do it 'closes a server ok' do
s = start_a_server s = start_a_server
begin begin
blk = Proc.new { s.close } blk = proc { s.close }
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
ensure ensure
s.close s.close
@ -98,7 +93,7 @@ describe Server do
it 'can be called more than once without error' do it 'can be called more than once without error' do
s = start_a_server s = start_a_server
blk = Proc.new { s.close } blk = proc { s.close }
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
blk.call blk.call
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
@ -106,11 +101,9 @@ describe Server do
end end
describe '#add_http_port' do describe '#add_http_port' do
describe 'for insecure servers' do describe 'for insecure servers' do
it 'runs without failing' do it 'runs without failing' do
blk = Proc.new do blk = proc do
s = Server.new(@cq, nil) s = Server.new(@cq, nil)
s.add_http2_port('localhost:0') s.add_http2_port('localhost:0')
s.close s.close
@ -123,13 +116,11 @@ describe Server do
s.close s.close
expect { s.add_http2_port('localhost:0') }.to raise_error(RuntimeError) expect { s.add_http2_port('localhost:0') }.to raise_error(RuntimeError)
end end
end end
describe 'for secure servers' do describe 'for secure servers' do
it 'runs without failing' do it 'runs without failing' do
blk = Proc.new do blk = proc do
s = Server.new(@cq, nil) s = Server.new(@cq, nil)
s.add_http2_port('localhost:0', true) s.add_http2_port('localhost:0', true)
s.close s.close
@ -140,16 +131,13 @@ describe Server do
it 'fails if the server is closed' do it 'fails if the server is closed' do
s = Server.new(@cq, nil) s = Server.new(@cq, nil)
s.close s.close
blk = Proc.new { s.add_http2_port('localhost:0', true) } blk = proc { s.add_http2_port('localhost:0', true) }
expect(&blk).to raise_error(RuntimeError) expect(&blk).to raise_error(RuntimeError)
end end
end end
end end
shared_examples '#new' do shared_examples '#new' do
it 'takes a completion queue with nil channel args' do it 'takes a completion queue with nil channel args' do
expect { Server.new(@cq, nil, create_test_cert) }.to_not raise_error expect { Server.new(@cq, nil, create_test_cert) }.to_not raise_error
end end
@ -162,14 +150,14 @@ describe Server do
end end
it 'does not take a hash with bad values as channel args' do it 'does not take a hash with bad values as channel args' do
blk = construct_with_args(:symbol => Object.new) blk = construct_with_args(symbol: Object.new)
expect(&blk).to raise_error TypeError expect(&blk).to raise_error TypeError
blk = construct_with_args('1' => Hash.new) blk = construct_with_args('1' => Hash.new)
expect(&blk).to raise_error TypeError expect(&blk).to raise_error TypeError
end end
it 'can take a hash with a symbol key as channel args' do it 'can take a hash with a symbol key as channel args' do
blk = construct_with_args(:a_symbol => 1) blk = construct_with_args(a_symbol: 1)
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
@ -179,46 +167,41 @@ describe Server do
end end
it 'can take a hash with a string value as channel args' do it 'can take a hash with a string value as channel args' do
blk = construct_with_args(:a_symbol => '1') blk = construct_with_args(a_symbol: '1')
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
it 'can take a hash with a symbol value as channel args' do it 'can take a hash with a symbol value as channel args' do
blk = construct_with_args(:a_symbol => :another_symbol) blk = construct_with_args(a_symbol: :another_symbol)
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
it 'can take a hash with a numeric value as channel args' do it 'can take a hash with a numeric value as channel args' do
blk = construct_with_args(:a_symbol => 1) blk = construct_with_args(a_symbol: 1)
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
it 'can take a hash with many args as channel args' do it 'can take a hash with many args as channel args' do
args = Hash[127.times.collect { |x| [x.to_s, x] } ] args = Hash[127.times.collect { |x| [x.to_s, x] }]
blk = construct_with_args(args) blk = construct_with_args(args)
expect(&blk).to_not raise_error expect(&blk).to_not raise_error
end end
end end
describe '#new with an insecure channel' do describe '#new with an insecure channel' do
def construct_with_args(a) def construct_with_args(a)
Proc.new { Server.new(@cq, a) } proc { Server.new(@cq, a) }
end end
it_behaves_like '#new' it_behaves_like '#new'
end end
describe '#new with a secure channel' do describe '#new with a secure channel' do
def construct_with_args(a) def construct_with_args(a)
Proc.new { Server.new(@cq, a, create_test_cert) } proc { Server.new(@cq, a, create_test_cert) }
end end
it_behaves_like '#new' it_behaves_like '#new'
end end
def start_a_server def start_a_server
@ -229,5 +212,4 @@ describe Server do
s.start s.start
s s
end end
end end

@ -32,7 +32,6 @@ require 'grpc'
TimeConsts = GRPC::Core::TimeConsts TimeConsts = GRPC::Core::TimeConsts
describe TimeConsts do describe TimeConsts do
before(:each) do before(:each) do
@known_consts = [:ZERO, :INFINITE_FUTURE, :INFINITE_PAST].sort @known_consts = [:ZERO, :INFINITE_FUTURE, :INFINITE_PAST].sort
end end
@ -49,11 +48,9 @@ describe TimeConsts do
end end
end end
end end
end end
describe '#from_relative_time' do describe '#from_relative_time' do
it 'cannot handle arbitrary objects' do it 'cannot handle arbitrary objects' do
expect { TimeConsts.from_relative_time(Object.new) }.to raise_error expect { TimeConsts.from_relative_time(Object.new) }.to raise_error
end end
@ -89,5 +86,4 @@ describe '#from_relative_time' do
expect(abs.to_f).to be_within(epsilon).of(want.to_f) expect(abs.to_f).to be_within(epsilon).of(want.to_f)
end end
end end
end end

Loading…
Cancel
Save