Skip to content
This repository was archived by the owner on Dec 7, 2020. It is now read-only.

Commit ac734e8

Browse files
committed
style fixes, added deprecation warning on sendMsg
1 parent 1cbb6b4 commit ac734e8

File tree

11 files changed

+143
-119
lines changed

11 files changed

+143
-119
lines changed

CHANGELOG

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,9 @@
1+
0.2.1 (September 22, 2014)
2+
# producer
3+
- fixed argument parser including nil options
4+
- updated compressed.topics to take an array and transform it into a comma-separated list.
5+
- removed compressed.topics parsing log fallacy.
6+
- tons of inspection/style fixes
17
0.2.0 (September 20, 2014)
28
# producer
39
- exposed close method to producer for cleanup in async threads.

README.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111

1212
## About
1313

14-
This gem is primarily used to wrap most of the [Kafka 0.8.0 high level consumer] and [Kafka 0.8.0 producer] API into
14+
This gem is primarily used to wrap most of the [Kafka 0.8.1.1 high level consumer] and [Kafka 0.8.1.1 producer] API into
1515
jruby.
1616
The [Kafka Consumer Group Example] is pretty much ported to this library.
1717

@@ -21,7 +21,7 @@ The [Kafka Consumer Group Example] is pretty much ported to this library.
2121

2222
## Installation
2323

24-
This package is now distruted via rubygems.org but you can build it using the following instructions.
24+
This package is now distributed via [RubyGems.org](http://rubygems.org) but you can build it using the following instructions.
2525

2626
From the root of the project run:
2727

@@ -57,7 +57,7 @@ make a producer
5757
producer_options = {:topic_id => "test", :broker_list => "localhost:9092"}
5858
producer = Kafka::Producer.new(producer_options)
5959
producer.connect()
60-
producer.sendMsg(nil, "heres a test")
60+
producer.sendMsg(nil, "here's a test")
6161

6262

6363
then a consumer

Rakefile

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,20 @@
11
task :package do
2-
system("gem build jruby-kafka.gemspec")
2+
system('gem build jruby-kafka.gemspec')
33
end
44

55
task :publish do
6-
Rake::Task["clean"].execute
7-
Rake::Task["package"].execute
8-
system("gem push jruby-kafka*.gem")
6+
Rake::Task['clean'].execute
7+
Rake::Task['package'].execute
8+
system('gem push jruby-kafka*.gem')
99
end
1010

1111

1212
task :install do
13-
Rake::Task["package"].execute
14-
system("gem install jruby-kafka*.gem")
15-
Rake::Task["clean"].execute
13+
Rake::Task['package'].execute
14+
system('gem install jruby-kafka*.gem')
15+
Rake::Task['clean'].execute
1616
end
1717

1818
task :clean do
19-
system("rm jruby*.gem")
19+
system('rm jruby*.gem')
2020
end

jruby-kafka.gemspec

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -5,18 +5,18 @@ Gem::Specification.new do |spec|
55
files += Dir["#{dir}/**/*"]
66
end
77

8-
spec.name = "jruby-kafka"
9-
spec.version = "0.2.0"
10-
spec.authors = ["Joseph Lawson"]
11-
spec.email = ["[email protected]"]
12-
spec.description = "this is primarily to be used as an interface for logstash"
13-
spec.summary = "jruby Kafka wrapper"
14-
spec.homepage = "https://github.com/joekiller/jruby-kafka"
15-
spec.license = "Apache 2.0"
16-
spec.platform = "java"
8+
spec.name = 'jruby-kafka'
9+
spec.version = '0.2.0'
10+
spec.authors = ['Joseph Lawson']
11+
spec.email = ['[email protected]']
12+
spec.description = 'this is primarily to be used as an interface for logstash'
13+
spec.summary = 'jruby Kafka wrapper'
14+
spec.homepage = 'https://github.com/joekiller/jruby-kafka'
15+
spec.license = 'Apache 2.0'
16+
spec.platform = 'java'
1717

1818
spec.files = files
19-
spec.require_paths << "lib"
19+
spec.require_paths << 'lib'
2020

2121
spec.add_dependency 'jbundler', '0.5.5'
2222
spec.requirements << "jar 'org.apache.kafka:kafka_2.9.2', '0.8.1.1'"

lib/jruby-kafka.rb

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,12 +7,12 @@
77
#
88
if not defined? JBUNDLER_CLASSPATH and ENV['KAFKA_PATH']
99
require 'jruby-kafka/loader'
10-
Kafka.load_jars()
10+
Kafka.load_jars
1111
end
1212

13-
require "jruby-kafka/consumer"
14-
require "jruby-kafka/group"
15-
require "jruby-kafka/producer"
13+
require 'jruby-kafka/consumer'
14+
require 'jruby-kafka/group'
15+
require 'jruby-kafka/producer'
1616

1717
module Kafka
1818
end

lib/jruby-kafka/consumer.rb

Lines changed: 15 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,33 +1,34 @@
1-
require "java"
2-
require "jruby-kafka/namespace"
3-
4-
java_import 'kafka.consumer.ConsumerIterator'
5-
java_import 'kafka.consumer.KafkaStream'
6-
java_import 'kafka.common.ConsumerRebalanceFailedException'
7-
java_import 'kafka.consumer.ConsumerTimeoutException'
1+
require 'java'
2+
require 'jruby-kafka/namespace'
83

4+
# noinspection JRubyStringImportInspection
95
class Kafka::Consumer
6+
java_import 'kafka.consumer.ConsumerIterator'
7+
java_import 'kafka.consumer.KafkaStream'
8+
java_import 'kafka.common.ConsumerRebalanceFailedException'
9+
java_import 'kafka.consumer.ConsumerTimeoutException'
10+
1011
include Java::JavaLang::Runnable
1112
java_signature 'void run()'
1213

1314
@m_stream
14-
@m_threadNumber
15+
@m_thread_number
1516
@m_queue
1617

17-
def initialize(a_stream, a_threadNumber, a_queue, a_bool_restart_on_exception, a_sleep_ms)
18-
@m_threadNumber = a_threadNumber
18+
def initialize(a_stream, a_thread_number, a_queue, restart_on_exception, a_sleep_ms)
19+
@m_thread_number = a_thread_number
1920
@m_stream = a_stream
2021
@m_queue = a_queue
21-
@m_restart_on_exception = a_bool_restart_on_exception
22+
@m_restart_on_exception = restart_on_exception
2223
@m_sleep_ms = 1.0 / 1000.0 * Float(a_sleep_ms)
2324
end
2425

2526
def run
26-
it = @m_stream.iterator()
27+
it = @m_stream.iterator
2728
begin
28-
while it.hasNext()
29+
while it.hasNext
2930
begin
30-
@m_queue << it.next().message()
31+
@m_queue << it.next.message
3132
end
3233
end
3334
rescue Exception => e

lib/jruby-kafka/error.rb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
require "jruby-kafka/namespace"
1+
require 'jruby-kafka/namespace'
22

33
class KafkaError < StandardError
44
attr_reader :object

lib/jruby-kafka/group.rb

Lines changed: 53 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,17 @@
11
# basically we are porting this https://cwiki.apache.org/confluence/display/KAFKA/Consumer+Group+Example
22

3-
require "java"
3+
require 'java'
44

5-
require "jruby-kafka/namespace"
6-
require "jruby-kafka/consumer"
7-
require "jruby-kafka/error"
8-
9-
java_import 'java.util.concurrent.ExecutorService'
10-
java_import 'java.util.concurrent.Executors'
11-
java_import 'org.I0Itec.zkclient.exception.ZkException'
5+
require 'jruby-kafka/namespace'
6+
require 'jruby-kafka/consumer'
7+
require 'jruby-kafka/error'
128

9+
# noinspection JRubyStringImportInspection
1310
class Kafka::Group
11+
java_import 'java.util.concurrent.ExecutorService'
12+
java_import 'java.util.concurrent.Executors'
13+
java_import 'org.I0Itec.zkclient.exception.ZkException'
14+
1415
@consumer
1516
@executor
1617
@topic
@@ -136,81 +137,80 @@ def initialize(options={})
136137
end
137138
end
138139

139-
private
140-
def validate_required_arguments(options={})
141-
[:zk_connect, :group_id, :topic_id].each do |opt|
142-
raise(ArgumentError, "#{opt} is required.") unless options[opt]
143-
end
144-
end
145-
146140
public
147-
def shutdown()
141+
142+
def shutdown
148143
if @consumer
149-
@consumer.shutdown()
144+
@consumer.shutdown
150145
end
151146
if @executor
152-
@executor.shutdown()
147+
@executor.shutdown
153148
end
154149
@running = false
155150
end
156151

157-
public
158-
def run(a_numThreads, a_queue)
152+
def run(a_num_threads, a_queue)
159153
begin
160154
if @auto_offset_reset == 'smallest'
161155
Java::kafka::utils::ZkUtils.maybeDeletePath(@zk_connect, "/consumers/#{@group_id}")
162156
end
163157

164-
@consumer = Java::kafka::consumer::Consumer.createJavaConsumerConnector(createConsumerConfig())
158+
@consumer = Java::kafka::consumer::Consumer.createJavaConsumerConnector(create_consumer_config)
165159
rescue ZkException => e
166160
raise KafkaError.new(e), "Got ZkException: #{e}"
167161
end
168-
topicCountMap = java.util.HashMap.new()
169-
thread_value = a_numThreads.to_java Java::int
170-
topicCountMap.put(@topic, thread_value)
171-
consumerMap = @consumer.createMessageStreams(topicCountMap)
172-
streams = Array.new(consumerMap[@topic])
162+
topic_count_map = java.util.HashMap.new
163+
thread_value = a_num_threads.to_java Java::int
164+
topic_count_map.put(@topic, thread_value)
165+
consumer_map = @consumer.createMessageStreams(topic_count_map)
166+
streams = Array.new(consumer_map[@topic])
173167

174-
@executor = Executors.newFixedThreadPool(a_numThreads)
168+
@executor = Executors.newFixedThreadPool(a_num_threads)
175169
@executor_submit = @executor.java_method(:submit, [Java::JavaLang::Runnable.java_class])
176170

177-
threadNumber = 0
178-
for stream in streams
179-
@executor_submit.call(Kafka::Consumer.new(stream, threadNumber, a_queue, @consumer_restart_on_error, @consumer_restart_sleep_ms))
180-
threadNumber += 1
171+
thread_number = 0
172+
streams.each do |stream|
173+
@executor_submit.call(Kafka::Consumer.new(stream, thread_number, a_queue, @consumer_restart_on_error, @consumer_restart_sleep_ms))
174+
thread_number += 1
181175
end
182176
@running = true
183177
end
184178

185-
public
186179
def running?
187180
@running
188181
end
189182

190183
private
191-
def createConsumerConfig()
192-
properties = java.util.Properties.new()
193-
properties.put("zookeeper.connect", @zk_connect)
194-
properties.put("group.id", @group_id)
195-
properties.put("zookeeper.connection.timeout.ms", @zk_connect_timeout)
196-
properties.put("zookeeper.session.timeout.ms", @zk_session_timeout)
197-
properties.put("zookeeper.sync.time.ms", @zk_sync_time)
198-
properties.put("auto.commit.interval.ms", @auto_commit_interval)
199-
properties.put("auto.offset.reset", @auto_offset_reset)
200-
properties.put("rebalance.max.retries", @rebalance_max_retries)
201-
properties.put("rebalance.backoff.ms", @rebalance_backoff_ms)
202-
properties.put("socket.timeout.ms", @socket_timeout_ms)
203-
properties.put("socket.receive.buffer.bytes", @socket_receive_buffer_bytes)
204-
properties.put("fetch.message.max.bytes", @fetch_message_max_bytes)
205-
properties.put("auto.commit.enable", @auto_commit_enable)
206-
properties.put("queued.max.message.chunks", @queued_max_message_chunks)
207-
properties.put("fetch.min.bytes", @fetch_min_bytes)
208-
properties.put("fetch.wait.max.ms", @fetch_wait_max_ms)
209-
properties.put("refresh.leader.backoff.ms", @refresh_leader_backoff_ms)
210-
properties.put("consumer.timeout.ms", @consumer_timeout_ms)
184+
185+
def validate_required_arguments(options={})
186+
[:zk_connect, :group_id, :topic_id].each do |opt|
187+
raise(ArgumentError, "#{opt} is required.") unless options[opt]
188+
end
189+
end
190+
191+
def create_consumer_config
192+
properties = java.util.Properties.new
193+
properties.put('zookeeper.connect', @zk_connect)
194+
properties.put('group.id', @group_id)
195+
properties.put('zookeeper.connection.timeout.ms', @zk_connect_timeout)
196+
properties.put('zookeeper.session.timeout.ms', @zk_session_timeout)
197+
properties.put('zookeeper.sync.time.ms', @zk_sync_time)
198+
properties.put('auto.commit.interval.ms', @auto_commit_interval)
199+
properties.put('auto.offset.reset', @auto_offset_reset)
200+
properties.put('rebalance.max.retries', @rebalance_max_retries)
201+
properties.put('rebalance.backoff.ms', @rebalance_backoff_ms)
202+
properties.put('socket.timeout.ms', @socket_timeout_ms)
203+
properties.put('socket.receive.buffer.bytes', @socket_receive_buffer_bytes)
204+
properties.put('fetch.message.max.bytes', @fetch_message_max_bytes)
205+
properties.put('auto.commit.enable', @auto_commit_enable)
206+
properties.put('queued.max.message.chunks', @queued_max_message_chunks)
207+
properties.put('fetch.min.bytes', @fetch_min_bytes)
208+
properties.put('fetch.wait.max.ms', @fetch_wait_max_ms)
209+
properties.put('refresh.leader.backoff.ms', @refresh_leader_backoff_ms)
210+
properties.put('consumer.timeout.ms', @consumer_timeout_ms)
211211
unless @consumer_id.nil?
212212
properties.put('consumer.id', @consumer_id)
213213
end
214-
return Java::kafka::consumer::ConsumerConfig.new(properties)
214+
Java::kafka::consumer::ConsumerConfig.new(properties)
215215
end
216216
end

0 commit comments

Comments
 (0)