summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorLamont Granquist <lamont@scriptkiddie.org>2016-08-09 13:18:58 -0700
committerLamont Granquist <lamont@scriptkiddie.org>2016-08-16 18:41:25 -0700
commit392814db246c40549504d935bed94ad5d6442bfa (patch)
tree7f7000b07f265a5c2a81d9132f8649b6f13835d6
parenteba6379b0048ef33913a03c60677ed521a9c57e1 (diff)
downloadchef-392814db246c40549504d935bed94ad5d6442bfa.tar.gz
fix Lint/UselessAccessModifier cop
-rw-r--r--lib/chef/chef_fs/command_line.rb70
-rw-r--r--lib/chef/chef_fs/file_system.rb276
-rw-r--r--lib/chef/key.rb111
-rw-r--r--lib/chef/provider/package/windows/registry_uninstall_entry.rb2
-rw-r--r--spec/integration/recipes/resource_action_spec.rb36
-rw-r--r--spec/support/shared/integration/knife_support.rb2
6 files changed, 246 insertions, 251 deletions
diff --git a/lib/chef/chef_fs/command_line.rb b/lib/chef/chef_fs/command_line.rb
index c824bc90df..2d887f4780 100644
--- a/lib/chef/chef_fs/command_line.rb
+++ b/lib/chef/chef_fs/command_line.rb
@@ -242,48 +242,50 @@ class Chef
return [ [ :error, old_entry, new_entry, nil, nil, e ] ]
end
- private
+ class << self
+ private
- def self.sort_keys(json_object)
- if json_object.is_a?(Array)
- json_object.map { |o| sort_keys(o) }
- elsif json_object.is_a?(Hash)
- new_hash = {}
- json_object.keys.sort.each { |key| new_hash[key] = sort_keys(json_object[key]) }
- new_hash
- else
- json_object
+ def sort_keys(json_object)
+ if json_object.is_a?(Array)
+ json_object.map { |o| sort_keys(o) }
+ elsif json_object.is_a?(Hash)
+ new_hash = {}
+ json_object.keys.sort.each { |key| new_hash[key] = sort_keys(json_object[key]) }
+ new_hash
+ else
+ json_object
+ end
end
- end
- def self.canonicalize_json(json_text)
- parsed_json = Chef::JSONCompat.parse(json_text)
- sorted_json = sort_keys(parsed_json)
- Chef::JSONCompat.to_json_pretty(sorted_json)
- end
-
- def self.diff_text(old_path, new_path, old_value, new_value)
- # Copy to tempfiles before diffing
- # TODO don't copy things that are already in files! Or find an in-memory diff algorithm
- begin
- new_tempfile = Tempfile.new("new")
- new_tempfile.write(new_value)
- new_tempfile.close
+ def canonicalize_json(json_text)
+ parsed_json = Chef::JSONCompat.parse(json_text)
+ sorted_json = sort_keys(parsed_json)
+ Chef::JSONCompat.to_json_pretty(sorted_json)
+ end
+ def diff_text(old_path, new_path, old_value, new_value)
+ # Copy to tempfiles before diffing
+ # TODO don't copy things that are already in files! Or find an in-memory diff algorithm
begin
- old_tempfile = Tempfile.new("old")
- old_tempfile.write(old_value)
- old_tempfile.close
+ new_tempfile = Tempfile.new("new")
+ new_tempfile.write(new_value)
+ new_tempfile.close
- result = Chef::Util::Diff.new.udiff(old_tempfile.path, new_tempfile.path)
- result = result.gsub(/^--- #{old_tempfile.path}/, "--- #{old_path}")
- result = result.gsub(/^\+\+\+ #{new_tempfile.path}/, "+++ #{new_path}")
- result
+ begin
+ old_tempfile = Tempfile.new("old")
+ old_tempfile.write(old_value)
+ old_tempfile.close
+
+ result = Chef::Util::Diff.new.udiff(old_tempfile.path, new_tempfile.path)
+ result = result.gsub(/^--- #{old_tempfile.path}/, "--- #{old_path}")
+ result = result.gsub(/^\+\+\+ #{new_tempfile.path}/, "+++ #{new_path}")
+ result
+ ensure
+ old_tempfile.close!
+ end
ensure
- old_tempfile.close!
+ new_tempfile.close!
end
- ensure
- new_tempfile.close!
end
end
end
diff --git a/lib/chef/chef_fs/file_system.rb b/lib/chef/chef_fs/file_system.rb
index 69dce54f00..1a8da2fd6b 100644
--- a/lib/chef/chef_fs/file_system.rb
+++ b/lib/chef/chef_fs/file_system.rb
@@ -68,7 +68,7 @@ class Chef
list_from(exact_child, &block)
end
- # Otherwise, go through all children and find any matches
+ # Otherwise, go through all children and find any matches
elsif entry.dir?
results = Parallelizer.parallelize(entry.children) { |child| Chef::ChefFS::FileSystem.list(child, pattern) }
results.flatten(1).each(&block)
@@ -257,172 +257,174 @@ class Chef
[ are_same, a_value, b_value ]
end
- private
-
- # Copy two entries (could be files or dirs)
- def self.copy_entries(src_entry, dest_entry, new_dest_parent, recurse_depth, options, ui, format_path)
- # A NOTE about this algorithm:
- # There are cases where this algorithm does too many network requests.
- # knife upload with a specific filename will first check if the file
- # exists (a "dir" in the parent) before deciding whether to POST or
- # PUT it. If we just tried PUT (or POST) and then tried the other if
- # the conflict failed, we wouldn't need to check existence.
- # On the other hand, we may already have DONE the request, in which
- # case we shouldn't waste time trying PUT if we know the file doesn't
- # exist.
- # Will need to decide how that works with checksums, though.
- error = false
- begin
- dest_path = format_path.call(dest_entry) if ui
- src_path = format_path.call(src_entry) if ui
- if !src_entry.exists?
- if options[:purge]
- # If we would not have uploaded it, we will not purge it.
- if src_entry.parent.can_have_child?(dest_entry.name, dest_entry.dir?)
- if options[:dry_run]
- ui.output "Would delete #{dest_path}" if ui
- else
- begin
- dest_entry.delete(true)
- ui.output "Deleted extra entry #{dest_path} (purge is on)" if ui
- rescue Chef::ChefFS::FileSystem::NotFoundError
- ui.output "Entry #{dest_path} does not exist. Nothing to do. (purge is on)" if ui
+ class << self
+ private
+
+ # Copy two entries (could be files or dirs)
+ def copy_entries(src_entry, dest_entry, new_dest_parent, recurse_depth, options, ui, format_path)
+ # A NOTE about this algorithm:
+ # There are cases where this algorithm does too many network requests.
+ # knife upload with a specific filename will first check if the file
+ # exists (a "dir" in the parent) before deciding whether to POST or
+ # PUT it. If we just tried PUT (or POST) and then tried the other if
+ # the conflict failed, we wouldn't need to check existence.
+ # On the other hand, we may already have DONE the request, in which
+ # case we shouldn't waste time trying PUT if we know the file doesn't
+ # exist.
+ # Will need to decide how that works with checksums, though.
+ error = false
+ begin
+ dest_path = format_path.call(dest_entry) if ui
+ src_path = format_path.call(src_entry) if ui
+ if !src_entry.exists?
+ if options[:purge]
+ # If we would not have uploaded it, we will not purge it.
+ if src_entry.parent.can_have_child?(dest_entry.name, dest_entry.dir?)
+ if options[:dry_run]
+ ui.output "Would delete #{dest_path}" if ui
+ else
+ begin
+ dest_entry.delete(true)
+ ui.output "Deleted extra entry #{dest_path} (purge is on)" if ui
+ rescue Chef::ChefFS::FileSystem::NotFoundError
+ ui.output "Entry #{dest_path} does not exist. Nothing to do. (purge is on)" if ui
+ end
end
- end
- else
- ui.output ("Not deleting extra entry #{dest_path} (purge is off)") if ui
- end
- end
-
- elsif !dest_entry.exists?
- if new_dest_parent.can_have_child?(src_entry.name, src_entry.dir?)
- # If the entry can do a copy directly from filesystem, do that.
- if new_dest_parent.respond_to?(:create_child_from)
- if options[:dry_run]
- ui.output "Would create #{dest_path}" if ui
else
- new_dest_parent.create_child_from(src_entry)
- ui.output "Created #{dest_path}" if ui
+ ui.output ("Not deleting extra entry #{dest_path} (purge is off)") if ui
end
- return
end
- if src_entry.dir?
- if options[:dry_run]
- ui.output "Would create #{dest_path}" if ui
- new_dest_dir = new_dest_parent.child(src_entry.name)
- else
- new_dest_dir = new_dest_parent.create_child(src_entry.name, nil)
- ui.output "Created #{dest_path}" if ui
- end
- # Directory creation is recursive.
- if recurse_depth != 0
- parallel_do(src_entry.children) do |src_child|
- new_dest_child = new_dest_dir.child(src_child.name)
- child_error = copy_entries(src_child, new_dest_child, new_dest_dir, recurse_depth ? recurse_depth - 1 : recurse_depth, options, ui, format_path)
- error ||= child_error
+ elsif !dest_entry.exists?
+ if new_dest_parent.can_have_child?(src_entry.name, src_entry.dir?)
+ # If the entry can do a copy directly from filesystem, do that.
+ if new_dest_parent.respond_to?(:create_child_from)
+ if options[:dry_run]
+ ui.output "Would create #{dest_path}" if ui
+ else
+ new_dest_parent.create_child_from(src_entry)
+ ui.output "Created #{dest_path}" if ui
end
+ return
end
- else
- if options[:dry_run]
- ui.output "Would create #{dest_path}" if ui
- else
- child = new_dest_parent.create_child(src_entry.name, src_entry.read)
- ui.output "Created #{format_path.call(child)}" if ui
- end
- end
- end
-
- else
- # Both exist.
- # If the entry can do a copy directly, do that.
- if dest_entry.respond_to?(:copy_from)
- if options[:force] || compare(src_entry, dest_entry)[0] == false
- if options[:dry_run]
- ui.output "Would update #{dest_path}" if ui
+ if src_entry.dir?
+ if options[:dry_run]
+ ui.output "Would create #{dest_path}" if ui
+ new_dest_dir = new_dest_parent.child(src_entry.name)
+ else
+ new_dest_dir = new_dest_parent.create_child(src_entry.name, nil)
+ ui.output "Created #{dest_path}" if ui
+ end
+ # Directory creation is recursive.
+ if recurse_depth != 0
+ parallel_do(src_entry.children) do |src_child|
+ new_dest_child = new_dest_dir.child(src_child.name)
+ child_error = copy_entries(src_child, new_dest_child, new_dest_dir, recurse_depth ? recurse_depth - 1 : recurse_depth, options, ui, format_path)
+ error ||= child_error
+ end
+ end
else
- dest_entry.copy_from(src_entry, options)
- ui.output "Updated #{dest_path}" if ui
+ if options[:dry_run]
+ ui.output "Would create #{dest_path}" if ui
+ else
+ child = new_dest_parent.create_child(src_entry.name, src_entry.read)
+ ui.output "Created #{format_path.call(child)}" if ui
+ end
end
end
- return
- end
- # If they are different types, log an error.
- if src_entry.dir?
- if dest_entry.dir?
- # If both are directories, recurse into their children
- if recurse_depth != 0
- parallel_do(child_pairs(src_entry, dest_entry)) do |src_child, dest_child|
- child_error = copy_entries(src_child, dest_child, dest_entry, recurse_depth ? recurse_depth - 1 : recurse_depth, options, ui, format_path)
- error ||= child_error
+ else
+ # Both exist.
+
+ # If the entry can do a copy directly, do that.
+ if dest_entry.respond_to?(:copy_from)
+ if options[:force] || compare(src_entry, dest_entry)[0] == false
+ if options[:dry_run]
+ ui.output "Would update #{dest_path}" if ui
+ else
+ dest_entry.copy_from(src_entry, options)
+ ui.output "Updated #{dest_path}" if ui
end
end
- else
- # If they are different types.
- ui.error("File #{src_path} is a directory while file #{dest_path} is a regular file\n") if ui
return
end
- else
- if dest_entry.dir?
- ui.error("File #{src_path} is a regular file while file #{dest_path} is a directory\n") if ui
- return
- else
- # Both are files! Copy them unless we're sure they are the same.'
- if options[:diff] == false
- should_copy = false
- elsif options[:force]
- should_copy = true
- src_value = nil
+ # If they are different types, log an error.
+ if src_entry.dir?
+ if dest_entry.dir?
+ # If both are directories, recurse into their children
+ if recurse_depth != 0
+ parallel_do(child_pairs(src_entry, dest_entry)) do |src_child, dest_child|
+ child_error = copy_entries(src_child, dest_child, dest_entry, recurse_depth ? recurse_depth - 1 : recurse_depth, options, ui, format_path)
+ error ||= child_error
+ end
+ end
else
- are_same, src_value, _dest_value = compare(src_entry, dest_entry)
- should_copy = !are_same
+ # If they are different types.
+ ui.error("File #{src_path} is a directory while file #{dest_path} is a regular file\n") if ui
+ return
end
- if should_copy
- if options[:dry_run]
- ui.output "Would update #{dest_path}" if ui
+ else
+ if dest_entry.dir?
+ ui.error("File #{src_path} is a regular file while file #{dest_path} is a directory\n") if ui
+ return
+ else
+
+ # Both are files! Copy them unless we're sure they are the same.
+ if options[:diff] == false
+ should_copy = false
+ elsif options[:force]
+ should_copy = true
+ src_value = nil
else
- src_value = src_entry.read if src_value.nil?
- dest_entry.write(src_value)
- ui.output "Updated #{dest_path}" if ui
+ are_same, src_value, _dest_value = compare(src_entry, dest_entry)
+ should_copy = !are_same
+ end
+ if should_copy
+ if options[:dry_run]
+ ui.output "Would update #{dest_path}" if ui
+ else
+ src_value = src_entry.read if src_value.nil?
+ dest_entry.write(src_value)
+ ui.output "Updated #{dest_path}" if ui
+ end
end
end
end
end
+ rescue RubyFileError => e
+ ui.warn "#{format_path.call(e.entry)} #{e.reason}." if ui
+ rescue DefaultEnvironmentCannotBeModifiedError => e
+ ui.warn "#{format_path.call(e.entry)} #{e.reason}." if ui
+ rescue OperationFailedError => e
+ ui.error "#{format_path.call(e.entry)} failed to #{e.operation}: #{e.message}" if ui
+ error = true
+ rescue OperationNotAllowedError => e
+ ui.error "#{format_path.call(e.entry)} #{e.reason}." if ui
+ error = true
end
- rescue RubyFileError => e
- ui.warn "#{format_path.call(e.entry)} #{e.reason}." if ui
- rescue DefaultEnvironmentCannotBeModifiedError => e
- ui.warn "#{format_path.call(e.entry)} #{e.reason}." if ui
- rescue OperationFailedError => e
- ui.error "#{format_path.call(e.entry)} failed to #{e.operation}: #{e.message}" if ui
- error = true
- rescue OperationNotAllowedError => e
- ui.error "#{format_path.call(e.entry)} #{e.reason}." if ui
- error = true
+ error
end
- error
- end
- def self.get_or_create_parent(entry, options, ui, format_path)
- parent = entry.parent
- if parent && !parent.exists?
- parent_path = format_path.call(parent) if ui
- parent_parent = get_or_create_parent(parent, options, ui, format_path)
- if options[:dry_run]
- ui.output "Would create #{parent_path}" if ui
- else
- parent = parent_parent.create_child(parent.name, nil)
- ui.output "Created #{parent_path}" if ui
+ def get_or_create_parent(entry, options, ui, format_path)
+ parent = entry.parent
+ if parent && !parent.exists?
+ parent_path = format_path.call(parent) if ui
+ parent_parent = get_or_create_parent(parent, options, ui, format_path)
+ if options[:dry_run]
+ ui.output "Would create #{parent_path}" if ui
+ else
+ parent = parent_parent.create_child(parent.name, nil)
+ ui.output "Created #{parent_path}" if ui
+ end
end
+ return parent
end
- return parent
- end
- def self.parallel_do(enum, options = {}, &block)
- Chef::ChefFS::Parallelizer.parallel_do(enum, options, &block)
+ def parallel_do(enum, options = {}, &block)
+ Chef::ChefFS::Parallelizer.parallel_do(enum, options, &block)
+ end
end
end
end
diff --git a/lib/chef/key.rb b/lib/chef/key.rb
index 1c25c5daad..38822e8b26 100644
--- a/lib/chef/key.rb
+++ b/lib/chef/key.rb
@@ -201,72 +201,71 @@ class Chef
chef_rest.delete("#{api_base}/#{@actor}/keys/#{@name}")
end
- # Class methods
- def self.from_hash(key_hash)
- if key_hash.has_key?("user")
- key = Chef::Key.new(key_hash["user"], "user")
- elsif key_hash.has_key?("client")
- key = Chef::Key.new(key_hash["client"], "client")
- else
- raise Chef::Exceptions::MissingKeyAttribute, "The hash passed to from_hash does not contain the key 'user' or 'client'. Please pass a hash that defines one of those keys."
+ class << self
+ def from_hash(key_hash)
+ if key_hash.has_key?("user")
+ key = Chef::Key.new(key_hash["user"], "user")
+ elsif key_hash.has_key?("client")
+ key = Chef::Key.new(key_hash["client"], "client")
+ else
+ raise Chef::Exceptions::MissingKeyAttribute, "The hash passed to from_hash does not contain the key 'user' or 'client'. Please pass a hash that defines one of those keys."
+ end
+ key.name key_hash["name"] if key_hash.key?("name")
+ key.public_key key_hash["public_key"] if key_hash.key?("public_key")
+ key.private_key key_hash["private_key"] if key_hash.key?("private_key")
+ key.create_key key_hash["create_key"] if key_hash.key?("create_key")
+ key.expiration_date key_hash["expiration_date"] if key_hash.key?("expiration_date")
+ key
end
- key.name key_hash["name"] if key_hash.key?("name")
- key.public_key key_hash["public_key"] if key_hash.key?("public_key")
- key.private_key key_hash["private_key"] if key_hash.key?("private_key")
- key.create_key key_hash["create_key"] if key_hash.key?("create_key")
- key.expiration_date key_hash["expiration_date"] if key_hash.key?("expiration_date")
- key
- end
-
- def self.from_json(json)
- Chef::Key.from_hash(Chef::JSONCompat.from_json(json))
- end
- def self.json_create(json)
- Chef.log_deprecation("Auto inflation of JSON data is deprecated. Please use Chef::Key#from_json or one of the load_by methods.")
- Chef::Key.from_json(json)
- end
+ def from_json(json)
+ Chef::Key.from_hash(Chef::JSONCompat.from_json(json))
+ end
- def self.list_by_user(actor, inflate = false)
- keys = Chef::ServerAPI.new(Chef::Config[:chef_server_root]).get("users/#{actor}/keys")
- self.list(keys, actor, :load_by_user, inflate)
- end
+ def json_create(json)
+ Chef.log_deprecation("Auto inflation of JSON data is deprecated. Please use Chef::Key#from_json or one of the load_by methods.")
+ Chef::Key.from_json(json)
+ end
- def self.list_by_client(actor, inflate = false)
- keys = Chef::ServerAPI.new(Chef::Config[:chef_server_url]).get("clients/#{actor}/keys")
- self.list(keys, actor, :load_by_client, inflate)
- end
+ def list_by_user(actor, inflate = false)
+ keys = Chef::ServerAPI.new(Chef::Config[:chef_server_root]).get("users/#{actor}/keys")
+ self.list(keys, actor, :load_by_user, inflate)
+ end
- def self.load_by_user(actor, key_name)
- response = Chef::ServerAPI.new(Chef::Config[:chef_server_root]).get("users/#{actor}/keys/#{key_name}")
- Chef::Key.from_hash(response.merge({ "user" => actor }))
- end
+ def list_by_client(actor, inflate = false)
+ keys = Chef::ServerAPI.new(Chef::Config[:chef_server_url]).get("clients/#{actor}/keys")
+ self.list(keys, actor, :load_by_client, inflate)
+ end
- def self.load_by_client(actor, key_name)
- response = Chef::ServerAPI.new(Chef::Config[:chef_server_url]).get("clients/#{actor}/keys/#{key_name}")
- Chef::Key.from_hash(response.merge({ "client" => actor }))
- end
+ def load_by_user(actor, key_name)
+ response = Chef::ServerAPI.new(Chef::Config[:chef_server_root]).get("users/#{actor}/keys/#{key_name}")
+ Chef::Key.from_hash(response.merge({ "user" => actor }))
+ end
- def self.generate_fingerprint(public_key)
- openssl_key_object = OpenSSL::PKey::RSA.new(public_key)
- data_string = OpenSSL::ASN1::Sequence([
- OpenSSL::ASN1::Integer.new(openssl_key_object.public_key.n),
- OpenSSL::ASN1::Integer.new(openssl_key_object.public_key.e),
- ])
- OpenSSL::Digest::SHA1.hexdigest(data_string.to_der).scan(/../).join(":")
- end
+ def load_by_client(actor, key_name)
+ response = Chef::ServerAPI.new(Chef::Config[:chef_server_url]).get("clients/#{actor}/keys/#{key_name}")
+ Chef::Key.from_hash(response.merge({ "client" => actor }))
+ end
- private
+ def generate_fingerprint(public_key)
+ openssl_key_object = OpenSSL::PKey::RSA.new(public_key)
+ data_string = OpenSSL::ASN1::Sequence([
+ OpenSSL::ASN1::Integer.new(openssl_key_object.public_key.n),
+ OpenSSL::ASN1::Integer.new(openssl_key_object.public_key.e),
+ ])
+ OpenSSL::Digest::SHA1.hexdigest(data_string.to_der).scan(/../).join(":")
+ end
- def self.list(keys, actor, load_method_symbol, inflate)
- if inflate
- keys.inject({}) do |key_map, result|
- name = result["name"]
- key_map[name] = Chef::Key.send(load_method_symbol, actor, name)
- key_map
+ def list(keys, actor, load_method_symbol, inflate)
+ if inflate
+ keys.inject({}) do |key_map, result|
+ name = result["name"]
+ key_map[name] = Chef::Key.send(load_method_symbol, actor, name)
+ key_map
+ end
+ else
+ keys
end
- else
- keys
end
end
end
diff --git a/lib/chef/provider/package/windows/registry_uninstall_entry.rb b/lib/chef/provider/package/windows/registry_uninstall_entry.rb
index 3fa00b6081..a693558883 100644
--- a/lib/chef/provider/package/windows/registry_uninstall_entry.rb
+++ b/lib/chef/provider/package/windows/registry_uninstall_entry.rb
@@ -79,8 +79,6 @@ class Chef
attr_reader :uninstall_string
attr_reader :data
- private
-
UNINSTALL_SUBKEY = 'Software\Microsoft\Windows\CurrentVersion\Uninstall'.freeze
end
end
diff --git a/spec/integration/recipes/resource_action_spec.rb b/spec/integration/recipes/resource_action_spec.rb
index 8f6f4b7f46..f3d1321b9b 100644
--- a/spec/integration/recipes/resource_action_spec.rb
+++ b/spec/integration/recipes/resource_action_spec.rb
@@ -139,26 +139,6 @@ module ResourceActionSpec
attr_accessor :ruby_block_converged
end
- public
-
- def foo_public
- "foo_public!"
- end
-
- protected
-
- def foo_protected
- "foo_protected!"
- end
-
- private
-
- def foo_private
- "foo_private!"
- end
-
- public
-
action :access_recipe_dsl do
ActionJackson.ran_action = :access_recipe_dsl
ruby_block "hi there" do
@@ -199,6 +179,22 @@ module ResourceActionSpec
ActionJackson.ran_action = :access_class_method
ActionJackson.succeeded = ActionJackson.ruby_block_converged
end
+
+ def foo_public
+ "foo_public!"
+ end
+
+ protected
+
+ def foo_protected
+ "foo_protected!"
+ end
+
+ private
+
+ def foo_private
+ "foo_private!"
+ end
end
before(:each) {
diff --git a/spec/support/shared/integration/knife_support.rb b/spec/support/shared/integration/knife_support.rb
index 01df9ab1a7..1a374e1b84 100644
--- a/spec/support/shared/integration/knife_support.rb
+++ b/spec/support/shared/integration/knife_support.rb
@@ -111,8 +111,6 @@ module KnifeSupport
end
end
- private
-
class KnifeResult
include ::RSpec::Matchers