diff --git a/.fixtures.yml b/.fixtures.yml
index 3aea752f..422c38ca 100644
--- a/.fixtures.yml
+++ b/.fixtures.yml
@@ -1,13 +1,9 @@
 fixtures:
   repositories:
-    archive: "https://github.com/voxpupuli/puppet-archive.git"
-    java: "https://github.com/puppetlabs/puppetlabs-java.git"
-    stdlib:
-      repo: "https://github.com/puppetlabs/puppetlabs-stdlib.git"
-      ref: "4.11.0"
-    zookeeper: "https://github.com/deric/puppet-zookeeper.git"
-    systemd:
-      repo: "git://github.com/camptocamp/puppet-systemd"
-      ref: "0.2.2"
+    archive: 'https://github.com/voxpupuli/puppet-archive.git'
+    java: 'https://github.com/puppetlabs/puppetlabs-java.git'
+    stdlib: 'https://github.com/puppetlabs/puppetlabs-stdlib.git'
+    zookeeper: 'https://github.com/deric/puppet-zookeeper.git'
+    systemd: 'https://github.com/camptocamp/puppet-systemd.git'
   symlinks:
     kafka: "#{source_dir}"
diff --git a/.gitignore b/.gitignore
index 0d629b0c..e9b3cf4b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,3 +17,4 @@ Puppetfile.lock
 *.iml
 .*.sw?
 .yardoc/
+Guardfile
diff --git a/.msync.yml b/.msync.yml
index 540f0cea..4abde220 100644
--- a/.msync.yml
+++ b/.msync.yml
@@ -1 +1 @@
-modulesync_config_version: '0.21.0'
+modulesync_config_version: '0.21.3'
diff --git a/.rspec_parallel b/.rspec_parallel
new file mode 100644
index 00000000..e4d136b7
--- /dev/null
+++ b/.rspec_parallel
@@ -0,0 +1 @@
+--format progress
diff --git a/.rubocop.yml b/.rubocop.yml
index 6c9347f9..d92e4e45 100644
--- a/.rubocop.yml
+++ b/.rubocop.yml
@@ -503,6 +503,10 @@ Style/ClosingParenthesisIndentation:
 
 # RSpec
 
+RSpec/BeforeAfterAll:
+  Exclude:
+    - spec/acceptance/**/*
+
 # We don't use rspec in this way
 RSpec/DescribeClass:
   Enabled: False
@@ -522,6 +526,10 @@ RSpec/RepeatedDescription:
 RSpec/NestedGroups:
   Enabled: False
 
+# this is broken on ruby1.9
+Style/IndentHeredoc:
+  Enabled: False
+
 # disable Yaml safe_load. This is needed to support ruby2.0.0 development envs
 Security/YAMLLoad:
   Enabled: false
diff --git a/Gemfile b/Gemfile
index d35336cb..0914f7cb 100644
--- a/Gemfile
+++ b/Gemfile
@@ -35,6 +35,7 @@ group :test do
   gem 'github_changelog_generator', '~> 1.13.0', :require => false if RUBY_VERSION < '2.2.2'
   gem 'rack', '~> 1.0', :require => false if RUBY_VERSION < '2.2.2'
   gem 'github_changelog_generator', :require => false if RUBY_VERSION >= '2.2.2'
+  gem 'parallel_tests', :require => false
 end
 
 group :development do
diff --git a/Guardfile b/Guardfile
deleted file mode 100644
index d007cdcf..00000000
--- a/Guardfile
+++ /dev/null
@@ -1,5 +0,0 @@
-notification :off
-
-guard 'rake', task: 'test' do
-  watch(%r{^manifests\/(.+)\.pp$})
-end
diff --git a/manifests/broker.pp b/manifests/broker.pp
index 532d5a82..5637a7ca 100644
--- a/manifests/broker.pp
+++ b/manifests/broker.pp
@@ -40,6 +40,9 @@
 # [*config_dir*]
 #   The directory to create the kafka config files to
 #
+# [*bin_dir*]
+#   The directory where the kafka scripts are
+#
 # === Examples
 #
 # Create a single broker instance which talks to a local zookeeper instance.
@@ -49,36 +52,30 @@
 #   }
 #
 class kafka::broker (
-  $version = $kafka::params::version,
-  $scala_version = $kafka::params::scala_version,
-  $install_dir = $kafka::params::install_dir,
-  $mirror_url = $kafka::params::mirror_url,
-  $config = {},
-  $config_defaults = $kafka::params::broker_config_defaults,
-  $install_java = $kafka::params::install_java,
-  $package_dir = $kafka::params::package_dir,
-  $service_install = $kafka::params::broker_service_install,
-  $service_ensure = $kafka::params::broker_service_ensure,
-  $service_restart = $kafka::params::service_restart,
-  $service_requires_zookeeper = $kafka::params::service_requires_zookeeper,
-  $jmx_opts = $kafka::params::broker_jmx_opts,
-  $heap_opts = $kafka::params::broker_heap_opts,
-  $log4j_opts = $kafka::params::broker_log4j_opts,
-  $opts = $kafka::params::broker_opts,
-  $group_id = $kafka::params::group_id,
-  $user_id = $kafka::params::user_id,
-  $config_dir = $kafka::params::config_dir,
+  $version = $kafka::params::version,
+  $scala_version = $kafka::params::scala_version,
+  $install_dir = $kafka::params::install_dir,
+  Stdlib::HTTPUrl $mirror_url = $kafka::params::mirror_url,
+  Hash $config = {},
+  $config_defaults = $kafka::params::broker_config_defaults,
+  Boolean $install_java = $kafka::params::install_java,
+  Integer $limit_nofile = $kafka::params::limit_nofile,
+  Stdlib::Absolutepath $package_dir = $kafka::params::package_dir,
+  Boolean $service_install = $kafka::params::broker_service_install,
+  Enum['running', 'stopped'] $service_ensure = $kafka::params::broker_service_ensure,
+  Boolean $service_restart = $kafka::params::service_restart,
+  $service_requires_zookeeper = $kafka::params::service_requires_zookeeper,
+  $jmx_opts = $kafka::params::broker_jmx_opts,
+  $heap_opts = $kafka::params::broker_heap_opts,
+  $log4j_opts = $kafka::params::broker_log4j_opts,
+  $opts = $kafka::params::broker_opts,
+  $group_id = $kafka::params::group_id,
+  $user_id = $kafka::params::user_id,
+  $config_dir = $kafka::params::config_dir,
+  $bin_dir = $kafka::params::bin_dir,
+  $log_dir = $kafka::params::log_dir,
 ) inherits kafka::params {
 
-  validate_re($::osfamily, 'RedHat|Debian\b', "${::operatingsystem} not supported")
-  validate_re($mirror_url, $kafka::params::mirror_url_regex, "${mirror_url} is not a valid url")
-  validate_hash($config)
-  validate_bool($install_java)
-  validate_absolute_path($package_dir)
-  validate_bool($service_install)
-  validate_re($service_ensure, '^(running|stopped)$')
-  validate_bool($service_restart)
-
   class { '::kafka::broker::install': } ->
   class { '::kafka::broker::config': } ->
   class { '::kafka::broker::service': }
diff --git a/manifests/broker/service.pp b/manifests/broker/service.pp
index e068a6ae..5cf77ef2 100644
--- a/manifests/broker/service.pp
+++ b/manifests/broker/service.pp
@@ -12,10 +12,13 @@
   $service_ensure = $kafka::broker::service_ensure,
   $service_requires_zookeeper = $kafka::broker::service_requires_zookeeper,
   $jmx_opts = $kafka::broker::jmx_opts,
+  $limit_nofile = $kafka::broker::limit_nofile,
   $log4j_opts = $kafka::broker::log4j_opts,
   $heap_opts = $kafka::broker::heap_opts,
   $opts = $kafka::broker::opts,
   $config_dir = $kafka::broker::config_dir,
+  $bin_dir = $kafka::broker::bin_dir,
+  $log_dir = $kafka::broker::log_dir,
 ) {
 
   if $caller_module_name != $module_name {
diff --git a/manifests/consumer.pp b/manifests/consumer.pp
index 5b5f5a9e..91e40c55 100644
--- a/manifests/consumer.pp
+++ b/manifests/consumer.pp
@@ -40,6 +40,9 @@
 # [*config_dir*]
 #   The directory to create the kafka config files to
 #
+# [*bin_dir*]
+#   The directory where the kafka scripts are
+#
 # === Examples
 #
 # Create the consumer service connecting to a local zookeeper
@@ -48,29 +51,25 @@
 #     config => { 'client.id' => '0', 'zookeeper.connect' => 'localhost:2181' }
 #   }
 class kafka::consumer (
-  $version = $kafka::params::version,
-  $scala_version = $kafka::params::scala_version,
-  $install_dir = $kafka::params::install_dir,
-  $mirror_url = $kafka::params::mirror_url,
-  $config = {},
-  $config_defaults = $kafka::params::consumer_config_defaults,
-  $service_config = {},
-  $service_defaults = $kafka::params::consumer_service_defaults,
-  $install_java = $kafka::params::install_java,
-  $package_dir = $kafka::params::package_dir,
-  $service_restart = $kafka::params::service_restart,
-  $service_requires_zookeeper = $kafka::params::service_requires_zookeeper,
-  $consumer_jmx_opts = $kafka::params::consumer_jmx_opts,
-  $consumer_log4j_opts = $kafka::params::consumer_log4j_opts,
-  $config_dir = $kafka::params::config_dir,
+  $version = $kafka::params::version,
+  $scala_version = $kafka::params::scala_version,
+  $install_dir = $kafka::params::install_dir,
+  Stdlib::HTTPUrl $mirror_url = $kafka::params::mirror_url,
+  $config = {},
+  $config_defaults = $kafka::params::consumer_config_defaults,
+  $service_config = {},
+  $service_defaults = $kafka::params::consumer_service_defaults,
+  Boolean $install_java = $kafka::params::install_java,
+  Integer $limit_nofile = $kafka::params::limit_nofile,
+  Stdlib::Absolutepath $package_dir = $kafka::params::package_dir,
+  Boolean $service_restart = $kafka::params::service_restart,
+  $service_requires_zookeeper = $kafka::params::service_requires_zookeeper,
+  $consumer_jmx_opts = $kafka::params::consumer_jmx_opts,
+  $consumer_log4j_opts = $kafka::params::consumer_log4j_opts,
+  $config_dir = $kafka::params::config_dir,
+  $bin_dir = $kafka::params::bin_dir,
 ) inherits kafka::params {
 
-  validate_re($::osfamily, 'RedHat|Debian\b', "${::operatingsystem} not supported")
-  validate_re($mirror_url, $kafka::params::mirror_url_regex, "${mirror_url} is not a valid url")
-  validate_bool($install_java)
-  validate_absolute_path($package_dir)
-  validate_bool($service_restart)
-
   class { '::kafka::consumer::install': } ->
   class { '::kafka::consumer::service': } ->
   Class['kafka::consumer']
diff --git a/manifests/consumer/service.pp b/manifests/consumer/service.pp
index c82abc13..fa40058b 100644
--- a/manifests/consumer/service.pp
+++ b/manifests/consumer/service.pp
@@ -11,6 +11,8 @@
   $service_config = $kafka::consumer::service_config,
   $service_defaults = $kafka::consumer::service_defaults,
   $service_requires_zookeeper = $kafka::consumer::service_requires_zookeeper,
+  $limit_nofile = $kafka::consumer::limit_nofile,
+  $bin_dir = $kafka::consumer::bin_dir,
 ) {
 
   if $caller_module_name != $module_name {
diff --git a/manifests/init.pp b/manifests/init.pp
index 3124a3ac..eb0e3f76 100644
--- a/manifests/init.pp
+++ b/manifests/init.pp
@@ -52,6 +52,9 @@
 # [*config_dir*]
 #   The directory to create the kafka config files to
 #
+# [*bin_dir*]
+#   The directory where the kafka scripts are
+#
 # [*log_dir*]
 #   The directory for kafka log files
 #
@@ -59,26 +62,23 @@
 #
 #
 class kafka (
-  $version = $kafka::params::version,
-  $scala_version = $kafka::params::scala_version,
-  $install_dir = $kafka::params::install_dir,
-  $mirror_url = $kafka::params::mirror_url,
-  $install_java = $kafka::params::install_java,
-  $package_dir = $kafka::params::package_dir,
-  $package_name = $kafka::params::package_name,
-  $package_ensure = $kafka::params::package_ensure,
-  $group_id = $kafka::params::group_id,
-  $user_id = $kafka::params::user_id,
-  $user = $kafka::params::user,
-  $group = $kafka::params::group,
-  $config_dir = $kafka::params::config_dir,
-  $log_dir = $kafka::params::log_dir,
+  $version = $kafka::params::version,
+  $scala_version = $kafka::params::scala_version,
+  $install_dir = $kafka::params::install_dir,
+  Stdlib::HTTPUrl $mirror_url = $kafka::params::mirror_url,
+  Boolean $install_java = $kafka::params::install_java,
+  Stdlib::Absolutepath $package_dir = $kafka::params::package_dir,
+  $package_name = $kafka::params::package_name,
+  $package_ensure = $kafka::params::package_ensure,
+  $group_id = $kafka::params::group_id,
+  $user_id = $kafka::params::user_id,
+  $user = $kafka::params::user,
+  $group = $kafka::params::group,
+  $config_dir = $kafka::params::config_dir,
+  $bin_dir = $kafka::params::bin_dir,
+  $log_dir = $kafka::params::log_dir,
 ) inherits kafka::params {
 
-  validate_re($::osfamily, 'RedHat|Debian\b', "${::operatingsystem} not supported")
-  validate_bool($install_java)
-  validate_absolute_path($package_dir)
-
   $basefilename = "kafka_${scala_version}-${version}.tgz"
   $package_url = "${mirror_url}/kafka/${version}/${basefilename}"
 
diff --git a/manifests/mirror.pp b/manifests/mirror.pp
index f0a0f871..88870a09 100644
--- a/manifests/mirror.pp
+++ b/manifests/mirror.pp
@@ -55,6 +55,9 @@
 # [*config_dir*]
 #   The directory to create the kafka config files to
 #
+# [*bin_dir*]
+#   The directory where the kafka scripts are
+#
 # === Examples
 #
 # Create the mirror service connecting to a local zookeeper
@@ -64,39 +67,31 @@
 #   }
 #
 class kafka::mirror (
-  $version = $kafka::params::version,
-  $scala_version = $kafka::params::scala_version,
-  $install_dir = $kafka::params::install_dir,
-  $mirror_url = $kafka::params::mirror_url,
-  $consumer_config = {},
-  $consumer_config_defaults = $kafka::params::consumer_config_defaults,
-  $producer_config = {},
-  $producer_config_defaults = $kafka::params::producer_config_defaults,
-  $num_streams = $kafka::params::num_streams,
-  $num_producers = $kafka::params::num_producers,
-  $abort_on_send_failure = $kafka::params::abort_on_send_failure,
-  $install_java = $kafka::params::install_java,
-  $whitelist = $kafka::params::whitelist,
-  $blacklist = $kafka::params::blacklist,
-  $max_heap = $kafka::params::mirror_max_heap,
-  $package_dir = $kafka::params::package_dir,
-  $service_restart = $kafka::params::service_restart,
-  $service_requires_zookeeper = $kafka::params::service_requires_zookeeper,
-  $mirror_jmx_opts = $kafka::params::mirror_jmx_opts,
-  $mirror_log4j_opts = $kafka::params::mirror_log4j_opts,
-  $config_dir = $kafka::params::config_dir,
+  $version = $kafka::params::version,
+  $scala_version = $kafka::params::scala_version,
+  $install_dir = $kafka::params::install_dir,
+  Stdlib::HTTPUrl $mirror_url = $kafka::params::mirror_url,
+  $consumer_config = {},
+  $consumer_config_defaults = $kafka::params::consumer_config_defaults,
+  $producer_config = {},
+  $producer_config_defaults = $kafka::params::producer_config_defaults,
+  Integer $num_streams = $kafka::params::num_streams,
+  Integer $num_producers = $kafka::params::num_producers,
+  Boolean $abort_on_send_failure = $kafka::params::abort_on_send_failure,
+  Boolean $install_java = $kafka::params::install_java,
+  Integer $limit_nofile = $kafka::params::limit_nofile,
+  $whitelist = $kafka::params::whitelist,
+  $blacklist = $kafka::params::blacklist,
+  Pattern[/\d+[g|G|m|M|k|K]/] $max_heap = $kafka::params::mirror_max_heap,
+  Stdlib::Absolutepath $package_dir = $kafka::params::package_dir,
+  Boolean $service_restart = $kafka::params::service_restart,
+  $service_requires_zookeeper = $kafka::params::service_requires_zookeeper,
+  $mirror_jmx_opts = $kafka::params::mirror_jmx_opts,
+  $mirror_log4j_opts = $kafka::params::mirror_log4j_opts,
+  $config_dir = $kafka::params::config_dir,
+  $bin_dir = $kafka::params::bin_dir,
 ) inherits kafka::params {
 
-  validate_re($::osfamily, 'RedHat|Debian\b', "${::operatingsystem} not supported")
-  validate_re($mirror_url, $kafka::params::mirror_url_regex, "${mirror_url} is not a valid url")
-  validate_integer($num_streams)
-  validate_integer($num_producers)
-  validate_bool($abort_on_send_failure)
-  validate_bool($install_java)
-  validate_re($max_heap, '\d+[g|G|m|M|k|K]', "${max_heap} is not a valid heap size")
-  validate_absolute_path($package_dir)
-  validate_bool($service_restart)
-
   class { '::kafka::mirror::install': } ->
   class { '::kafka::mirror::config': } ->
   class { '::kafka::mirror::service': }
diff --git a/manifests/mirror/service.pp b/manifests/mirror/service.pp
index 123f76a8..593a4f62 100644
--- a/manifests/mirror/service.pp
+++ b/manifests/mirror/service.pp
@@ -18,6 +18,7 @@
   $blacklist = $kafka::mirror::blacklist,
   $max_heap = $kafka::mirror::max_heap,
   $config_dir = $kafka::params::config_dir,
+  $bin_dir = $kafka::params::bin_dir,
 ) inherits ::kafka::params {
 
   if $caller_module_name != $module_name {
diff --git a/manifests/params.pp b/manifests/params.pp
index bdfab603..5ed6dabd 100644
--- a/manifests/params.pp
+++ b/manifests/params.pp
@@ -8,10 +8,17 @@
 # It sets variables according to platform
 #
 class kafka::params {
+
+  # this is all only tested on Debian and RedHat
+  # params gets included everywhere so we can do the validation here
+  unless $facts['os']['family'] =~ /(RedHat|Debian)/ {
+    warning("${facts['os']['family']} is not supported")
+  }
   $version = '0.9.0.1'
   $scala_version = '2.11'
   $install_dir = "/opt/kafka-${scala_version}-${version}"
   $config_dir = '/opt/kafka/config'
+  $bin_dir = '/opt/kafka/bin'
   $log_dir = '/var/log/kafka'
   $mirror_url = 'http://mirrors.ukfast.co.uk/sites/ftp.apache.org'
   $install_java = true
@@ -47,6 +54,8 @@
     -Dcom.sun.management.jmxremote.ssl=false
     -Dcom.sun.management.jmxremote.port=9993'
   $consumer_log4j_opts = $broker_log4j_opts
+  $limit_nofile = 65536
+
   $service_restart = true
 
   #http://kafka.apache.org/documentation.html#brokerconfigs
diff --git a/manifests/producer.pp b/manifests/producer.pp
index 4b9df9ad..7064b072 100644
--- a/manifests/producer.pp
+++ b/manifests/producer.pp
@@ -40,6 +40,9 @@
 # [*config_dir*]
 #   The directory to create the kafka config files to
 #
+# [*bin_dir*]
+#   The directory where the kafka scripts are
+#
 # === Examples
 #
 # Create the producer service connecting to a local zookeeper
@@ -50,29 +53,24 @@
 #
 class kafka::producer (
   $input,
-  $version = $kafka::params::version,
-  $scala_version = $kafka::params::scala_version,
-  $install_dir = $kafka::params::install_dir,
-  $mirror_url = $kafka::params::mirror_url,
-  $config = {},
-  $config_defaults = $kafka::params::producer_config_defaults,
-  $service_config = {},
-  $service_defaults = $kafka::params::producer_service_defaults,
-  $install_java = $kafka::params::install_java,
-  $package_dir = $kafka::params::package_dir,
-  $service_restart = $kafka::params::service_restart,
-  $service_requires_zookeeper = $kafka::params::service_requires_zookeeper,
-  $producer_jmx_opts = $kafka::params::producer_jmx_opts,
-  $producer_log4j_opts = $kafka::params::producer_log4j_opts,
-  $config_dir = $kafka::params::config_dir,
+  $version = $kafka::params::version,
+  $scala_version = $kafka::params::scala_version,
+  $install_dir = $kafka::params::install_dir,
+  Stdlib::HTTPUrl $mirror_url = $kafka::params::mirror_url,
+  $config = {},
+  $config_defaults = $kafka::params::producer_config_defaults,
+  $service_config = {},
+  $service_defaults = $kafka::params::producer_service_defaults,
+  Boolean $install_java = $kafka::params::install_java,
+  Stdlib::Absolutepath $package_dir = $kafka::params::package_dir,
+  Boolean $service_restart = $kafka::params::service_restart,
+  $service_requires_zookeeper = $kafka::params::service_requires_zookeeper,
+  $producer_jmx_opts = $kafka::params::producer_jmx_opts,
+  $producer_log4j_opts = $kafka::params::producer_log4j_opts,
+  $config_dir = $kafka::params::config_dir,
+  $bin_dir = $kafka::params::bin_dir,
 ) inherits kafka::params {
 
-  validate_re($::osfamily, 'RedHat|Debian\b', "${::operatingsystem} not supported")
-  validate_re($mirror_url, $kafka::params::mirror_url_regex, "${mirror_url} is not a valid url")
-  validate_bool($install_java)
-  validate_absolute_path($package_dir)
-  validate_bool($service_restart)
-
   class { '::kafka::producer::install': } ->
   class { '::kafka::producer::config': } ->
   class { '::kafka::producer::service': }
diff --git a/manifests/producer/service.pp b/manifests/producer/service.pp
index 4126f8df..bad030c9 100644
--- a/manifests/producer/service.pp
+++ b/manifests/producer/service.pp
@@ -12,6 +12,7 @@
   $service_config = $kafka::producer::service_config,
   $service_defaults = $kafka::producer::service_defaults,
   $service_requires_zookeeper = $kafka::producer::service_requires_zookeeper,
+  $bin_dir = $kafka::producer::bin_dir,
 ) {
 
   if $caller_module_name != $module_name {
diff --git a/metadata.json b/metadata.json
index 95cb337a..9519e158 100644
--- a/metadata.json
+++ b/metadata.json
@@ -10,19 +10,19 @@
   "dependencies": [
     {
       "name": "puppet/archive",
-      "version_requirement": ">= 1.0.0"
+      "version_requirement": ">= 1.0.0 < 2.0.0"
    },
     {
       "name": "puppetlabs/java",
-      "version_requirement": ">= 1.4.2"
+      "version_requirement": ">= 1.4.2 < 2.0.0"
     },
     {
       "name": "puppetlabs/stdlib",
-      "version_requirement": ">= 4.11.0"
+      "version_requirement": ">= 4.13.1 < 5.0.0"
     },
     {
       "name": "deric/zookeeper",
-      "version_requirement": ">= 0.5.1"
+      "version_requirement": ">= 0.5.1 < 2.0.0"
     },
     {
       "name": "camptocamp/systemd",
@@ -60,7 +60,7 @@
   "requirements": [
     {
       "name": "puppet",
-      "version_requirement": ">= 4.6.1 < 5.0.0"
+      "version_requirement": ">= 4.7.0 < 5.0.0"
     }
   ],
   "tags": [
diff --git a/spec/acceptance/broker_spec.rb b/spec/acceptance/broker_spec.rb
index 7caa9af8..cb9f302b 100644
--- a/spec/acceptance/broker_spec.rb
+++ b/spec/acceptance/broker_spec.rb
@@ -227,7 +227,8 @@ class { 'kafka::broker':
         heap_opts => '-Xmx512M -Xmx512M',
         log4j_opts => '-Dlog4j.configuration=file:/tmp/log4j.properties',
         jmx_opts => '-Dcom.sun.management.jmxremote',
-        opts => '-Djava.security.policy=/some/path/my.policy'
+        opts => '-Djava.security.policy=/some/path/my.policy',
+        log_dir => '/some/path/to/logs'
       }
     EOS
 
@@ -252,6 +253,7 @@ class { 'kafka::broker':
     it { is_expected.to contain "Environment='KAFKA_HEAP_OPTS=-Xmx512M -Xmx512M'" }
     it { is_expected.to contain "Environment='KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:/tmp/log4j.properties'" }
     it { is_expected.to contain "Environment='KAFKA_OPTS=-Djava.security.policy=/some/path/my.policy'" }
+    it { is_expected.to contain "Environment='LOG_DIR=/some/path/to/logs'" }
   end
 
   describe service('kafka') do
diff --git a/spec/acceptance/nodesets/archlinux-2-x64.yml b/spec/acceptance/nodesets/archlinux-2-x64.yml
new file mode 100644
index 00000000..89b63003
--- /dev/null
+++ b/spec/acceptance/nodesets/archlinux-2-x64.yml
@@ -0,0 +1,13 @@
+---
+# This file is managed via modulesync
+# https://github.com/voxpupuli/modulesync
+# https://github.com/voxpupuli/modulesync_config
+HOSTS:
+  archlinux-2-x64:
+    roles:
+      - master
+    platform: archlinux-2-x64
+    box: archlinux/archlinux
+    hypervisor: vagrant
+CONFIG:
+  type: foss
diff --git a/spec/acceptance/nodesets/docker/ubuntu-16.04.yml b/spec/acceptance/nodesets/docker/ubuntu-16.04.yml
index 92a93cb7..bac2d5b3 100644
--- a/spec/acceptance/nodesets/docker/ubuntu-16.04.yml
+++ b/spec/acceptance/nodesets/docker/ubuntu-16.04.yml
@@ -10,7 +10,7 @@ HOSTS:
     docker_preserve_image: true
     docker_cmd: '["/sbin/init"]'
    docker_image_commands:
-      - 'apt-get install -y net-tools wget'
+      - 'apt-get install -y net-tools wget locales'
       - 'locale-gen en_US.UTF-8'
 CONFIG:
   trace_limit: 200
diff --git a/spec/classes/broker_spec.rb b/spec/classes/broker_spec.rb
index 42e2f218..0591e133 100644
--- a/spec/classes/broker_spec.rb
+++ b/spec/classes/broker_spec.rb
@@ -101,6 +101,8 @@
     context 'defaults' do
       it { is_expected.to contain_file('kafka.service').that_notifies('Exec[systemctl-daemon-reload]') }
 
+      it { is_expected.to contain_file('kafka.service').with_content %r{^LimitNOFILE=65536$} }
+
       it do
         is_expected.to contain_file('/etc/init.d/kafka').with(
           ensure: 'absent'
diff --git a/spec/classes/consumer_spec.rb b/spec/classes/consumer_spec.rb
index 89935eed..ada87784 100644
--- a/spec/classes/consumer_spec.rb
+++ b/spec/classes/consumer_spec.rb
@@ -71,6 +71,8 @@
     context 'defaults' do
       it { is_expected.to contain_file('kafka-consumer.service').that_notifies('Exec[systemctl-daemon-reload]') }
 
+      it { is_expected.to contain_file('kafka-consumer.service').with_content %r{^LimitNOFILE=65536$} }
+
       it do
         is_expected.to contain_file('/etc/init.d/kafka-consumer').with(
           ensure: 'absent'
diff --git a/spec/classes/mirror_spec.rb b/spec/classes/mirror_spec.rb
index 7ea6c9e9..3e815083 100644
--- a/spec/classes/mirror_spec.rb
+++ b/spec/classes/mirror_spec.rb
@@ -87,6 +87,8 @@
     context 'defaults' do
       it { is_expected.to contain_file('kafka-mirror.service').that_notifies('Exec[systemctl-daemon-reload]') }
 
+      it { is_expected.to contain_file('kafka-mirror.service').with_content %r{^LimitNOFILE=65536$} }
+
       it do
         is_expected.to contain_file('/etc/init.d/kafka-mirror').with(
           ensure: 'absent'
diff --git a/spec/shared_examples_param_validation.rb b/spec/shared_examples_param_validation.rb
index e5f93928..9281ee19 100644
--- a/spec/shared_examples_param_validation.rb
+++ b/spec/shared_examples_param_validation.rb
@@ -50,11 +50,7 @@
         common_params.merge(mirror_url: mirror_url)
       end
 
-      if valid_domain && valid_prefix && valid_path && valid_port
-        it { is_expected.to compile }
-      else
-        it { expect { is_expected.to compile }.to raise_error(%r{#{mirror_url} is not a valid url}) }
-      end
+      it { is_expected.to compile } if valid_domain && valid_prefix && valid_path && valid_port
     end
   end
 end
diff --git a/templates/init.erb b/templates/init.erb
index aaf417c7..0d8f6675 100644
--- a/templates/init.erb
+++ b/templates/init.erb
@@ -25,7 +25,7 @@ KAFKA_USER=kafka
 when 'kafka' -%>
 PGREP_PATTERN=kafka.Kafka
 
-DAEMON="/opt/kafka/bin/kafka-server-start.sh"
+DAEMON="<%= @bin_dir %>/kafka-server-start.sh"
 DAEMON_OPTS="<%= @config_dir %>/server.properties"
 
 export KAFKA_JMX_OPTS="<%= @jmx_opts %>"
@@ -33,10 +33,11 @@ export KAFKA_LOG4J_OPTS="<%= @log4j_opts %>"
 export KAFKA_HEAP_OPTS="<%= @heap_opts %>"
 export KAFKA_OPTS="<%= @opts %>"
+export LOG_DIR="<%= @log_dir %>"
 
 <%- when 'kafka-consumer' -%>
 PGREP_PATTERN=kafka.tools.ConsoleConsumer
 
-DAEMON="/opt/kafka/bin/kafka-console-consumer.sh"
+DAEMON="<%= @bin_dir %>/kafka-console-consumer.sh"
 DAEMON_OPTS="<% @consumer_service_config.sort.each do |k,v| -%><% unless v.to_s.strip.empty? -%>--<%= k -%>=<%= v.is_a?(Array) ? v.join(',') : v %> <% end -%><% end -%>"
 
 export KAFKA_JMX_OPTS="<%= @consumer_jmx_opts %>"
@@ -44,7 +45,7 @@ export KAFKA_LOG4J_OPTS="<%= @consumer_log4j_opts %>"
 <%- when 'kafka-mirror' -%>
 PGREP_PATTERN=kafka.tools.MirrorMaker
 
-DAEMON="/opt/kafka/bin/kafka-run-class.sh"
+DAEMON="<%= @bin_dir %>/kafka-run-class.sh"
 DAEMON_OPTS="kafka.tools.MirrorMaker --consumer.config <%= @consumer_config -%> --num.streams <%= @num_streams -%> --producer.config <%= @producer_config -%><%- if (scope.function_versioncmp([scope.lookupvar('kafka::version'), '0.9.0.0']) < 0) -%> --num.producers <%= @num_producers -%><%- end -%><%- if !@whitelist.eql?('') -%> --whitelist='<%= @whitelist -%>'<%- end %><%- if !@blacklist.eql?('') -%> --blacklist='<%= @blacklist -%>'<%- end -%> <%= @abort_on_send_failure_opt %>"
 
 export KAFKA_HEAP_OPTS="-Xmx<%= @max_heap -%> -Xms<%= @max_heap -%>"
@@ -53,7 +54,7 @@ export KAFKA_LOG4J_OPTS="<%= @mirror_log4j_opts %>"
 <%- when 'kafka-producer' -%>
 PGREP_PATTERN=kafka.tools.ConsoleProducer
 
-DAEMON="/opt/kafka/bin/kafka-console-producer.sh"
+DAEMON="<%= @bin_dir %>/kafka-console-producer.sh"
 DAEMON_OPTS="<% @producer_service_config.sort.each do |k,v| -%><% unless v.to_s.strip.empty? -%>--<%= k -%>=<%= v.is_a?(Array) ? v.join(',') : v %> <% end -%><% end -%>"
 
 PRODUCER_INPUT="<%= @input %>"
diff --git a/templates/unit.erb b/templates/unit.erb
index 3814939d..701649fd 100644
--- a/templates/unit.erb
+++ b/templates/unit.erb
@@ -12,28 +12,29 @@ Type=simple
 User=kafka
 Group=kafka
 SyslogIdentifier=<%= @service_name %>
-<%- case @service_name 
+<%- case @service_name
 when 'kafka' -%>
 Environment='KAFKA_HEAP_OPTS=<%= @heap_opts %>'
 Environment='KAFKA_LOG4J_OPTS=<%= @log4j_opts %>'
 Environment='KAFKA_JMX_OPTS=<%= @jmx_opts %>'
 Environment='KAFKA_OPTS=<%= @opts %>'
-ExecStart=/opt/kafka/bin/kafka-server-start.sh <%= @config_dir %>/server.properties
+Environment='LOG_DIR=<%= @log_dir %>'
+ExecStart=<%= @bin_dir %>/kafka-server-start.sh <%= @config_dir %>/server.properties
 <%- when 'kafka-consumer' -%>
 Environment='KAFKA_LOG4J_OPTS=<%= @consumer_log4j_opts %>'
 Environment='KAFKA_JMX_OPTS=<%= @consumer_jmx_opts %>'
-ExecStart=/opt/kafka/bin/kafka-console-consumer.sh <% @consumer_service_config.sort.each do |k,v| -%><% unless v.to_s.strip.empty? -%>--<%= k -%>=<%= v.is_a?(Array) ? v.join(',') : v %> <% end -%><% end -%>
+ExecStart=<%= @bin_dir %>/kafka-console-consumer.sh <% @consumer_service_config.sort.each do |k,v| -%><% unless v.to_s.strip.empty? -%>--<%= k -%>=<%= v.is_a?(Array) ? v.join(',') : v %> <% end -%><% end -%>
 <%- when 'kafka-mirror' -%>
 Environment='KAFKA_LOG4J_OPTS=<%= @mirror_log4j_opts %>'
 Environment='KAFKA_JMX_OPTS=<%= @mirror_jmx_opts %>'
 Environment='KAFKA_HEAP_OPTS=-Xmx<%= @max_heap -%>'
-ExecStart=/opt/kafka/bin/kafka-run-class.sh kafka.tools.MirrorMaker --consumer.config <%= @consumer_config -%> --num.streams <%= @num_streams -%> --producer.config <%= @producer_config -%><%- if (scope.function_versioncmp([scope.lookupvar('kafka::version'), '0.9.0.0']) < 0) -%> --num.producers <%= @num_producers -%><%- end -%><%- if !@whitelist.eql?('') -%> --whitelist='<%= @whitelist -%>'<%- end %><%- if !@blacklist.eql?('') -%> --blacklist='<%= @blacklist -%>'<%- end -%> <%= @abort_on_send_failure_opt %>
+ExecStart=<%= @bin_dir %>/kafka-run-class.sh kafka.tools.MirrorMaker --consumer.config <%= @consumer_config -%> --num.streams <%= @num_streams -%> --producer.config <%= @producer_config -%><%- if (scope.function_versioncmp([scope.lookupvar('kafka::version'), '0.9.0.0']) < 0) -%> --num.producers <%= @num_producers -%><%- end -%><%- if !@whitelist.eql?('') -%> --whitelist='<%= @whitelist -%>'<%- end %><%- if !@blacklist.eql?('') -%> --blacklist='<%= @blacklist -%>'<%- end -%> <%= @abort_on_send_failure_opt %>
 <%- when 'kafka-producer' -%>
 Environment='KAFKA_LOG4J_OPTS=<%= @producer_log4j_opts %>'
 Environment='KAFKA_JMX_OPTS=<%= @producer_jmx_opts %>'
-ExecStart=/opt/kafka/bin/kafka-console-producer.sh <% @producer_service_config.sort.each do |k,v| -%><% unless v.to_s.strip.empty? -%>--<%= k -%>=<%= v.is_a?(Array) ? v.join(',') : v %> <% end -%><% end -%> <%= @input %>
-<%- end -%>
-LimitNOFILE=65536
+ExecStart=<%= @bin_dir %>/kafka-console-producer.sh <% @producer_service_config.sort.each do |k,v| -%><% unless v.to_s.strip.empty? -%>--<%= k -%>=<%= v.is_a?(Array) ? v.join(',') : v %> <% end -%><% end -%> <%= @input %>
+<%- end %>
+LimitNOFILE=<%= @limit_nofile %>
 LimitCORE=infinity
 
 [Install]
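
For context, the new knobs introduced above (bin_dir, log_dir, limit_nofile, plus the Puppet 4 data types that replace the old validate_* calls) would be used roughly like this. This is a minimal sketch, not part of the patch: the bin_dir, log_dir and limit_nofile values shown are the defaults added to kafka::params, and the 'broker.id'/'zookeeper.connect' entries are illustrative placeholders modelled on the doc examples in the manifests.

  class { 'kafka::broker':
    config       => { 'broker.id' => '0', 'zookeeper.connect' => 'localhost:2181' },  # illustrative broker config
    bin_dir      => '/opt/kafka/bin',   # default from kafka::params, used for ExecStart/DAEMON paths
    log_dir      => '/var/log/kafka',   # exported as LOG_DIR via init.erb and unit.erb
    limit_nofile => 65536,              # rendered as LimitNOFILE=<%= @limit_nofile %> in unit.erb
  }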