author    Lamont Granquist <lamont@scriptkiddie.org>  2017-12-12 14:33:49 -0800
committer Lamont Granquist <lamont@scriptkiddie.org>  2018-03-15 18:03:28 -0700
commit    87f7236b16b1ff446360defd3bcd6c52e6771ec7 (patch)
tree      8c0a7cdf831f93f46812ba9a80171eba598d2e87 /lib/chef/provider/package
parent    fb61966bbc7a2d8920d772ff7f3f3fc8543b107d (diff)
download  chef-87f7236b16b1ff446360defd3bcd6c52e6771ec7.tar.gz
Yum package provider rewrite

Squash and rebase of all the work.

Signed-off-by: Lamont Granquist <lamont@scriptkiddie.org>
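The rewritten provider keeps the multipackage resource interface, so a single yum_package resource can still drive an array install (also the fast path called out in the flushcache comment below). A minimal recipe sketch, with placeholder package names:

    # hypothetical recipe snippet -- "vim-enhanced" and "git" are placeholders
    yum_package %w{vim-enhanced git} do
      version [nil, nil]   # nil selects the candidate (default) version for that package
      action :install
    end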
Diffstat (limited to 'lib/chef/provider/package')
-rw-r--r--  lib/chef/provider/package/yum.rb                        541
-rw-r--r--  lib/chef/provider/package/yum/python_helper.rb          219
-rw-r--r--  lib/chef/provider/package/yum/simplejson/LICENSE.txt     79
-rw-r--r--  lib/chef/provider/package/yum/simplejson/__init__.py    318
-rw-r--r--  lib/chef/provider/package/yum/simplejson/__init__.pyc   bin 0 -> 12059 bytes
-rw-r--r--  lib/chef/provider/package/yum/simplejson/decoder.py     354
-rw-r--r--  lib/chef/provider/package/yum/simplejson/decoder.pyc    bin 0 -> 11088 bytes
-rw-r--r--  lib/chef/provider/package/yum/simplejson/encoder.py     440
-rw-r--r--  lib/chef/provider/package/yum/simplejson/encoder.pyc    bin 0 -> 13588 bytes
-rw-r--r--  lib/chef/provider/package/yum/simplejson/scanner.py      65
-rw-r--r--  lib/chef/provider/package/yum/simplejson/scanner.pyc    bin 0 -> 2405 bytes
-rw-r--r--  lib/chef/provider/package/yum/simplejson/tool.py         37
-rw-r--r--  lib/chef/provider/package/yum/version.rb                 56
-rw-r--r--  lib/chef/provider/package/yum/yum-dump.py                307
-rw-r--r--  lib/chef/provider/package/yum/yum_cache.rb               333
-rw-r--r--  lib/chef/provider/package/yum/yum_helper.py              198
16 files changed, 1919 insertions, 1028 deletions
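As background for the diff below: the ruby provider no longer parses RPM data itself. It writes newline-delimited JSON queries to yum_helper.py over a pipe pair and reads back whitespace-separated name/version/arch triples (see PythonHelper#build_query and #parse_response in python_helper.rb). A hypothetical round trip, with made-up package data:

    # sketch of one query/response cycle; the package name and version are placeholders
    outpipe.syswrite '{"action":"whatavailable","provides":"vim-enhanced","arch":"x86_64"}' + "\n"
    inpipe.sysread(4096).chomp   # => "vim-enhanced 2:8.0.1763-19.el8 x86_64"
    # parse_response splits that into Version.new("vim-enhanced", "2:8.0.1763-19.el8", "x86_64")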
diff --git a/lib/chef/provider/package/yum.rb b/lib/chef/provider/package/yum.rb
index 4151b88242..94f3254aaf 100644
--- a/lib/chef/provider/package/yum.rb
+++ b/lib/chef/provider/package/yum.rb
@@ -1,6 +1,5 @@
-
-# Author:: Adam Jacob (<adam@chef.io>)
-# Copyright:: Copyright 2008-2017, Chef Software Inc.
+#
+# Copyright:: Copyright 2016-2018, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,483 +15,224 @@
# limitations under the License.
#
-require "chef/config"
require "chef/provider/package"
require "chef/resource/yum_package"
+require "chef/mixin/which"
+require "chef/mixin/shell_out"
require "chef/mixin/get_source_from_package"
-require "chef/provider/package/yum/rpm_utils"
+require "chef/provider/package/yum/python_helper"
+require "chef/provider/package/yum/version"
+# the stubs in the YumCache class are still an external API
require "chef/provider/package/yum/yum_cache"
class Chef
class Provider
class Package
class Yum < Chef::Provider::Package
+ extend Chef::Mixin::Which
+ extend Chef::Mixin::ShellOut
include Chef::Mixin::GetSourceFromPackage
- provides :package, platform_family: %w{rhel fedora amazon}
- provides :yum_package, os: "linux"
-
- # Multipackage API
allow_nils
use_multipackage_api
use_package_name_for_source
- # Overload the Package provider to keep track of the YumCache
- def initialize(new_resource, run_context)
- super
+ provides :package, platform_family: %w{fedora amazon} do
+ which("yum")
+ end
+
+ provides :package, platform_family: %w{rhel}, platform_version: ">= 8"
+
+ provides :yum_package, os: "linux"
- @yum = YumCache.instance
- @yum.yum_binary = yum_binary
+ #
+ # Most of the magic in this class happens in the python helper script. The ruby side of this
+ # provider knows only enough to translate Chef-style new_resource name+package+version into
+ # a request to the python side. The python side is then responsible for knowing everything
+ # about RPMs and what is installed and what is available. The ruby side of this class should
+ # remain a lightweight translation layer to translate Chef requests into RPC requests to
+ # python. This class knows nothing about how to compare RPM versions, and does not maintain
+ # any cached state of installed/available versions and should be kept that way.
+ #
+ def python_helper
+ @python_helper ||= PythonHelper.instance
end
- # @see Chef::Provider::Package#check_resource_semantics!
- def check_resource_semantics!
- super
+ def load_current_resource
+ flushcache if new_resource.flush_cache[:before]
- if !new_resource.version.nil? && package_name_array.length != new_version_array.length
- raise Chef::Exceptions::InvalidResourceSpecification, "Please provide a version for each package. Use `nil` for default version."
- end
+ @current_resource = Chef::Resource::YumPackage.new(new_resource.name)
+ current_resource.package_name(new_resource.package_name)
+ current_resource.version(get_current_versions)
- if !new_resource.arch.nil? && package_name_array.length != safe_arch_array.length
- raise Chef::Exceptions::InvalidResourceSpecification, "Please provide an architecture for each package. Use `nil` for default architecture."
- end
+ current_resource
end
- # @see Chef::Provider#define_resource_requirements
def define_resource_requirements
- super
-
- # Ensure that the source file (if specified) is present on the file system
requirements.assert(:install, :upgrade, :remove, :purge) do |a|
a.assertion { !new_resource.source || ::File.exist?(new_resource.source) }
a.failure_message Chef::Exceptions::Package, "Package #{new_resource.package_name} not found: #{new_resource.source}"
a.whyrun "assuming #{new_resource.source} would have previously been created"
end
- end
-
- # @see Chef::Provider#load_current_resource
- def load_current_resource
- @yum.reload if flush_cache[:before]
- manage_extra_repo_control
-
- if new_resource.source
- query_source_file
- else
- # At this point package_name could be:
- #
- # 1) a package name, eg: "foo"
- # 2) a package name.arch, eg: "foo.i386"
- # 3) or a dependency, eg: "foo >= 1.1"
- #
- # In the third case, we want to convert those dependency strings into
- # packages that we can actually install
- convert_dependency_strings_into_packages
-
- # Fill out the rest of the details by querying the Yum Cache
- query_yum_cache
- end
- @current_resource = Chef::Resource::YumPackage.new(new_resource.name)
- current_resource.package_name(new_resource.package_name)
- current_resource.version(@installed_version)
- current_resource
+ super
end
- # @see Chef::Provider::Package#package_locked
- def package_locked(name, version)
- locked = shell_out_with_timeout!("yum versionlock")
- locked_packages = locked.stdout.each_line.map do |line|
- line.sub(/-[^-]*-[^-]*$/, "").split(":").last.strip
+ def candidate_version
+ package_name_array.each_with_index.map do |pkg, i|
+ available_version(i).version_with_arch
end
- name.all? { |n| locked_packages.include? n }
end
- #
- # Package Action Classes
- #
-
- # @see Chef::Provider::Package#install_package
- def install_package(name, version)
- if new_resource.source
- yum_command("-d0 -e0 -y#{expand_options(new_resource.options)} localinstall #{new_resource.source}")
- else
- install_remote_package(name, version)
+ def get_current_versions
+ package_name_array.each_with_index.map do |pkg, i|
+ installed_version(i).version_with_arch
end
-
- flush_cache[:after] ? @yum.reload : @yum.reload_installed
- end
-
- # @see Chef::Provider::Package#upgrade_package
- def upgrade_package(name, version)
- install_package(name, version)
- end
-
- # @see Chef::Provider::Package#remove_package
- def remove_package(name, version)
- remove_str = full_package_name(name, version).join(" ")
- yum_command("-d0 -e0 -y#{expand_options(new_resource.options)} remove #{remove_str}")
-
- flush_cache[:after] ? @yum.reload : @yum.reload_installed
end
- # @see Chef::Provider::Package#purge_package
- def purge_package(name, version)
- remove_package(name, version)
- end
+ def install_package(names, versions)
+ method = nil
+ methods = []
+ names.each_with_index do |n, i|
+ next if n.nil?
- # @see Chef::Provider::Package#lock_package
- def lock_package(name, version)
- lock_str = full_package_name(name, as_array(name).map { nil }).join(" ")
- yum_command("-d0 -e0 -y#{expand_options(new_resource.options)} versionlock add #{lock_str}")
- end
+ av = available_version(i)
- # @see Chef::Provider::Package#unlock_package
- def unlock_package(name, version)
- unlock_str = full_package_name(name, as_array(name).map { nil }).join(" ")
- yum_command("-d0 -e0 -y#{expand_options(new_resource.options)} versionlock delete #{unlock_str}")
- end
+ name = av.name # resolve the name via the available/candidate version
- private
+ iv = python_helper.package_query(:whatinstalled, name)
- #
- # System Level Yum Operations
- #
+ method = "install"
- def yum_binary
- @yum_binary ||=
- begin
- yum_binary = new_resource.yum_binary if new_resource.is_a?(Chef::Resource::YumPackage)
- yum_binary ||= ::File.exist?("/usr/bin/yum-deprecated") ? "yum-deprecated" : "yum"
+ # If this is a package like the kernel that can be installed multiple times, we'll skip over this logic
+ if new_resource.allow_downgrade && version_gt?(iv.version_with_arch, av.version_with_arch) && !python_helper.install_only_packages(name)
+ # We allow downgrading only in the event of single-package
+ # rules where the user explicitly allowed it
+ method = "downgrade"
end
- end
- def version_compare(v1, v2)
- RPMVersion.parse(v1) <=> RPMVersion.parse(v2)
- end
+ methods << method
+ end
- # Enable or disable YumCache extra_repo_control
- def manage_extra_repo_control
- if new_resource.options
- repo_control = []
- new_resource.options.each do |opt|
- repo_control << opt if opt =~ /--(enable|disable)repo=.+/
- end
+ # We could split this up into two commands if we wanted to, but
+ # for now, just don't support this.
+ if methods.uniq.length > 1
+ raise Chef::Exceptions::Package, "Multipackage rule has a mix of upgrade and downgrade packages. Cannot proceed."
+ end
- if !repo_control.empty?
- @yum.enable_extra_repo_control(repo_control.join(" "))
- else
- @yum.disable_extra_repo_control
- end
+ if new_resource.source
+ yum(options, "-y #{method}", new_resource.source)
else
- @yum.disable_extra_repo_control
+ resolved_names = names.each_with_index.map { |name, i| available_version(i).to_s unless name.nil? }
+ yum(options, "-y #{method}", resolved_names)
end
+ flushcache
end
- # Query the Yum cache for information about potential packages
- def query_yum_cache
- installed_versions = []
- candidate_versions = []
+ # yum upgrade does not work on uninstalled packages, while install will upgrade
+ alias upgrade_package install_package
- package_name_array.each_with_index do |n, idx|
- pkg_name, eval_pkg_arch = parse_arch(n)
-
- # Defer to the arch property for the desired package architecture
- pkg_arch = safe_arch_array[idx] || eval_pkg_arch
- set_package_name(idx, pkg_name)
- set_package_arch(idx, pkg_arch)
-
- Chef::Log.debug("#{new_resource} checking yum info for #{yum_syntax(n, nil, pkg_arch)}")
- installed_versions << iv = @yum.installed_version(pkg_name, pkg_arch)
- candidate_versions << cv = @yum.candidate_version(pkg_name, pkg_arch)
-
- Chef::Log.debug("Found Yum package: #{pkg_name} installed version: #{iv || '(none)'} candidate version: #{cv || '(none)'}")
- end
-
- @installed_version = installed_versions.length > 1 ? installed_versions : installed_versions[0]
- @candidate_version = candidate_versions.length > 1 ? candidate_versions : candidate_versions[0]
+ def remove_package(names, versions)
+ resolved_names = names.each_with_index.map { |name, i| installed_version(i).to_s unless name.nil? }
+ yum(options, "-y remove", resolved_names)
+ flushcache
end
- # Query the provided source file for the package name and version
- def query_source_file
- Chef::Log.debug("#{new_resource} checking rpm status")
- shell_out_with_timeout!("rpm -qp --queryformat '%{NAME} %{VERSION}-%{RELEASE} %{ARCH}\n' #{new_resource.source}", timeout: Chef::Config[:yum_timeout]).stdout.each_line do |line|
- case line
- when /^(\S+)\s(\S+)\s(\S+)$/
- n = $1
- v = $2
- a = $3
-
- unless new_resource.package_name == n
- Chef::Log.debug("#{new_resource} updating package_name from #{new_resource.package_name} to #{n} (per #{new_resource.source})")
- new_resource.package_name(n)
- end
-
- unless new_resource.version == v
- Chef::Log.debug("#{new_resource} updating version from #{new_resource.version} to #{v} (per #{new_resource.source})")
- new_resource.version(v)
- end
-
- unless new_resource.arch == a
- Chef::Log.debug("#{new_resource} updating architecture from #{new_resource.arch} to #{a} (per #{new_resource.source})")
- new_resource.arch(a)
- end
- end
- end
+ alias purge_package remove_package
- @installed_version = @yum.installed_version(new_resource.package_name, new_resource.arch)
- @candidate_version = new_resource.version
+ action :flush_cache do
+ flushcache
end
- def yum_command(command)
- command = "#{yum_binary} #{command}"
- Chef::Log.debug("#{new_resource}: yum command: \"#{command}\"")
- status = shell_out_with_timeout(command, timeout: Chef::Config[:yum_timeout])
-
- # This is fun: rpm can encounter errors in the %post/%postun scripts which aren't
- # considered fatal - meaning the rpm is still successfully installed. These issue
- # cause yum to emit a non fatal warning but still exit(1). As there's currently no
- # way to suppress this behavior and an exit(1) will break a Chef run we make an
- # effort to trap these and re-run the same install command - it will either fail a
- # second time or succeed.
- #
- # A cleaner solution would have to be done in python and better hook into
- # yum/rpm to handle exceptions as we see fit.
- if status.exitstatus == 1
- status.stdout.each_line do |l|
- # rpm-4.4.2.3 lib/psm.c line 2182
- next unless l =~ /^error: %(post|postun)\(.*\) scriptlet failed, exit status \d+$/
- Chef::Log.warn("#{new_resource} caught non-fatal scriptlet issue: \"#{l}\". Can't trust yum exit status " \
- "so running install again to verify.")
- status = shell_out_with_timeout(command, timeout: Chef::Config[:yum_timeout])
- break
- end
- end
+ private
- if status.exitstatus > 0
- command_output = "STDOUT: #{status.stdout}\nSTDERR: #{status.stderr}"
- raise Chef::Exceptions::Exec, "#{command} returned #{status.exitstatus}:\n#{command_output}"
- end
+ def version_gt?(v1, v2)
+ return false if v1.nil? || v2.nil?
+ python_helper.compare_versions(v1, v2) == 1
end
- def install_remote_package(name, version)
- # Work around yum not exiting with an error if a package doesn't exist for CHEF-2062.
- all_avail = as_array(name).zip(as_array(version), safe_arch_array).any? do |n, v, a|
- @yum.version_available?(n, v, a)
- end
-
- method = log_method = nil
- methods = []
- if all_avail
- # More Yum fun:
- #
- # yum install of an old name+version will exit(1)
- # yum install of an old name+version+arch will exit(0) for some reason
- #
- # Some packages can be installed multiple times like the kernel
- as_array(name).zip(current_version_array, as_array(version), safe_arch_array).each do |n, cv, v, a|
- next if n.nil?
-
- method = "install"
- log_method = "installing"
-
- unless @yum.allow_multi_install.include?(n)
- if RPMVersion.parse(cv) > RPMVersion.parse(v)
- # We allow downgrading only in the evenit of single-package
- # rules where the user explicitly allowed it
- if allow_downgrade
- method = "downgrade"
- log_method = "downgrading"
- else
- # we bail like yum when the package is older
- raise Chef::Exceptions::Package, "Installed package #{yum_syntax(n, cv, a)} is newer " \
- "than candidate package #{yum_syntax(n, v, a)}"
- end
- end
- end
- # methods don't count for packages we won't be touching
- next if RPMVersion.parse(cv) == RPMVersion.parse(v)
- methods << method
- end
-
- # We could split this up into two commands if we wanted to, but
- # for now, just don't support this.
- if methods.uniq.length > 1
- raise Chef::Exceptions::Package, "Multipackage rule #{name} has a mix of upgrade and downgrade packages. Cannot proceed."
- end
-
- repos = []
- pkg_string_bits = []
- as_array(name).zip(current_version_array, as_array(version), safe_arch_array).each do |n, cv, v, a|
- next if n.nil?
- next if v == cv
- s = yum_syntax(n, v, a)
- repo = @yum.package_repository(n, v, a)
- repos << "#{s} from #{repo} repository"
- pkg_string_bits << s
- end
- pkg_string = pkg_string_bits.join(" ")
- Chef::Log.info("#{new_resource} #{log_method} #{repos.join(' ')}")
- yum_command("-d0 -e0 -y#{expand_options(new_resource.options)} #{method} #{pkg_string}")
- else
- raise Chef::Exceptions::Package, "Version #{version} of #{name} not found. Did you specify both version " \
- "and release? (version-release, e.g. 1.84-10.fc6)"
- end
+ def version_equals?(v1, v2)
+ return false if v1.nil? || v2.nil?
+ python_helper.compare_versions(v1, v2) == 0
end
- # Allow for foo.x86_64 style package_name like yum uses in it's output
- def parse_arch(package_name)
- if package_name =~ /^(.*)\.(.*)$/
- new_package_name = $1
- new_arch = $2
- # foo.i386 and foo.beta1 are both valid package names or expressions of an arch.
- # Ensure we don't have an existing package matching package_name, then ensure we at
- # least have a match for the new_package+new_arch before we overwrite. If neither
- # then fall through to standard package handling.
- old_installed = @yum.installed_version(package_name)
- old_candidate = @yum.candidate_version(package_name)
- new_installed = @yum.installed_version(new_package_name, new_arch)
- new_candidate = @yum.candidate_version(new_package_name, new_arch)
- if (old_installed.nil? && old_candidate.nil?) && (new_installed || new_candidate)
- Chef::Log.debug("Parsed out arch #{new_arch}, new package name is #{new_package_name}")
- return new_package_name, new_arch
- end
- end
- [package_name, nil]
+ def version_compare(v1, v2)
+ return false if v1.nil? || v2.nil?
+ python_helper.compare_versions(v1, v2)
end
- #
- # Dependency String Handling
- #
-
- # Iterate through the list of package_names given to us by the user and
- # see if any of them are in the depenency format ("foo >= 1.1"). Modify
- # the list of packages and versions to incorporate those values.
- def convert_dependency_strings_into_packages
- package_name_array.each_with_index do |n, index|
- next if @yum.package_available?(n)
- # If they aren't in the installed packages they could be a dependency.
- dep = parse_dependency(n, new_version_array[index])
- if dep
- if new_resource.package_name.is_a?(Array)
- new_resource.package_name(package_name_array - [n] + [dep.first])
- new_resource.version(new_version_array - [new_version_array[index]] + [dep.last]) if dep.last
- else
- new_resource.package_name(dep.first)
- new_resource.version(dep.last) if dep.last
- end
- end
- end
+ # Generate the yum syntax for the package
+ def yum_syntax(name, version, arch)
+ s = name
+ s += "-#{version}" if version
+ s += ".#{arch}" if arch
+ s
end
- # If we don't have the package we could have been passed a 'whatprovides' feature
- #
- # eg: yum install "perl(Config)"
- # yum install "mtr = 2:0.71-3.1"
- # yum install "mtr > 2:0.71"
- #
- # We support resolving these out of the Provides data imported from yum-dump.py and
- # matching them up with an actual package so the standard resource handling can apply.
- #
- # There is currently no support for filename matching.
- #
- # Note: This was largely left alone during the multipackage refactor
- def parse_dependency(name, version)
- # Transform the package_name into a requirement
-
- # If we are passed a version or a version constraint we have to assume it's a requirement first. If it can't be
- # parsed only yum_require.name will be set and new_resource.version will be left intact
- require_string = if version
- "#{name} #{version}"
- else
- # Transform the package_name into a requirement, might contain a version, could just be
- # a match for virtual provides
- name
- end
- yum_require = RPMRequire.parse(require_string)
- # and gather all the packages that have a Provides feature satisfying the requirement.
- # It could be multiple be we can only manage one
- packages = @yum.packages_from_require(yum_require)
-
- if packages.empty?
- # Don't bother if we are just ensuring a package is removed - we don't need Provides data
- actions = Array(new_resource.action)
- unless actions.size == 1 && (actions[0] == :remove || actions[0] == :purge)
- Chef::Log.debug("#{new_resource} couldn't match #{new_resource.package_name} in " \
- "installed Provides, loading available Provides - this may take a moment")
- @yum.reload_provides
- packages = @yum.packages_from_require(yum_require)
+ def resolve_source_to_version_obj
+ shell_out_with_timeout!("rpm -qp --queryformat '%{NAME} %{EPOCH} %{VERSION} %{RELEASE} %{ARCH}\n' #{new_resource.source}").stdout.each_line do |line|
+ # this is another case of committing the sin of doing some lightweight mangling of RPM versions in ruby -- but the output of the rpm command
+ # does not match what the yum library accepts.
+ case line
+ when /^(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)$/
+ return Version.new($1, "#{$2 == '(none)' ? '0' : $2}:#{$3}-#{$4}", $5)
end
end
+ end
- unless packages.empty?
- new_package_name = packages.first.name
- new_package_version = packages.first.version.to_s
- debug_msg = "#{name}: Unable to match package '#{name}' but matched #{packages.size} "
- debug_msg << (packages.size == 1 ? "package" : "packages")
- debug_msg << ", selected '#{new_package_name}' version '#{new_package_version}'"
- Chef::Log.debug(debug_msg)
-
- # Ensure it's not the same package under a different architecture
- unique_names = []
- packages.each do |pkg|
- unique_names << "#{pkg.name}-#{pkg.version.evr}"
- end
- unique_names.uniq!
-
- if unique_names.size > 1
- Chef::Log.warn("#{new_resource} matched multiple Provides for #{new_resource.package_name} " \
- "but we can only use the first match: #{new_package_name}. Please use a more " \
- "specific version.")
- end
+ # @returns Array<Version>
+ def available_version(index)
+ @available_version ||= []
- if yum_require.version.to_s.nil?
- new_package_version = nil
- end
+ @available_version[index] ||= if new_resource.source
+ resolve_source_to_version_obj
+ else
+ python_helper.package_query(:whatavailable, package_name_array[index], safe_version_array[index], safe_arch_array[index], options)
+ end
- [new_package_name, new_package_version]
- end
+ @available_version[index]
end
- #
- # Misc Helpers
- #
+ # @returns Array<Version>
+ def installed_version(index)
+ @installed_version ||= []
+ @installed_version[index] ||= if new_resource.source
+ python_helper.package_query(:whatinstalled, available_version(index).name, safe_version_array[index], safe_arch_array[index])
+ else
+ python_helper.package_query(:whatinstalled, package_name_array[index], safe_version_array[index], safe_arch_array[index])
+ end
+ @installed_version[index]
+ end
- # Given an list of names and versions, generate the full yum syntax package name
- def full_package_name(name, version)
- as_array(name).zip(as_array(version), safe_arch_array).map do |n, v, a|
- yum_syntax(n, v, a)
- end
+ # cache flushing is accomplished by simply restarting the python helper. this produces a roughly
+ # 15% hit to the runtime of installing/removing/upgrading packages. correctly using multipackage
+ # array installs (and the multipackage cookbook) can produce 600% improvements in runtime.
+ def flushcache
+ python_helper.restart
end
- # Generate the yum syntax for the package
- def yum_syntax(name, version, arch)
- s = name
- s += "-#{version}" if version
- s += ".#{arch}" if arch
- s
+ def yum_binary
+ @yum_binary ||=
+ begin
+ yum_binary = new_resource.yum_binary if new_resource.is_a?(Chef::Resource::YumPackage)
+ yum_binary ||= ::File.exist?("/usr/bin/yum-deprecated") ? "yum-deprecated" : "yum"
+ end
end
- # Set the package name correctly based on whether it is a String or Array
- def set_package_name(idx, name)
- if new_resource.package_name.is_a?(String)
- new_resource.package_name(name)
- else
- new_resource.package_name[idx] = name
- end
+ def yum(*args)
+ shell_out_with_timeout!(a_to_s(yum_binary, *args))
end
- # Set the architecture correcly based on whether it is a String or Array
- def set_package_arch(idx, arch)
- if new_resource.package_name.is_a?(String)
- new_resource.arch(arch) unless arch.nil?
+ def safe_version_array
+ if new_resource.version.is_a?(Array)
+ new_resource.version
+ elsif new_resource.version.nil?
+ package_name_array.map { nil }
else
- new_resource.arch ||= []
- new_resource.arch[idx] = arch
+ [ new_resource.version ]
end
end
- # A cousin of package_name_array, return a list of the architectures
- # defined in the resource.
def safe_arch_array
if new_resource.arch.is_a?(Array)
new_resource.arch
@@ -503,13 +243,6 @@ class Chef
end
end
- def flush_cache
- if new_resource.respond_to?("flush_cache")
- new_resource.flush_cache
- else
- { before: false, after: false }
- end
- end
end
end
end
diff --git a/lib/chef/provider/package/yum/python_helper.rb b/lib/chef/provider/package/yum/python_helper.rb
new file mode 100644
index 0000000000..2488ca38c1
--- /dev/null
+++ b/lib/chef/provider/package/yum/python_helper.rb
@@ -0,0 +1,219 @@
+#
+# Copyright:: Copyright 2016-2017, Chef Software Inc.
+# License:: Apache License, Version 2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+require "chef/mixin/which"
+require "chef/mixin/shell_out"
+require "chef/provider/package/yum/version"
+require "timeout"
+
+class Chef
+ class Provider
+ class Package
+ class Yum < Chef::Provider::Package
+ class PythonHelper
+ include Singleton
+ include Chef::Mixin::Which
+ include Chef::Mixin::ShellOut
+
+ attr_accessor :stdin
+ attr_accessor :stdout
+ attr_accessor :stderr
+ attr_accessor :inpipe
+ attr_accessor :outpipe
+ attr_accessor :wait_thr
+
+ YUM_HELPER = ::File.expand_path(::File.join(::File.dirname(__FILE__), "yum_helper.py")).freeze
+
+ def yum_command
+ @yum_command ||= which("python", "python2", "python2.7") do |f|
+ shell_out("#{f} -c 'import yum'").exitstatus == 0
+ end + " #{YUM_HELPER}"
+ end
+
+ def start
+ ENV["PYTHONUNBUFFERED"] = "1"
+ @inpipe, inpipe_write = IO.pipe
+ outpipe_read, @outpipe = IO.pipe
+ @stdin, @stdout, @stderr, @wait_thr = Open3.popen3("#{yum_command} #{outpipe_read.fileno} #{inpipe_write.fileno}", outpipe_read.fileno => outpipe_read, inpipe_write.fileno => inpipe_write, close_others: false)
+ outpipe_read.close
+ inpipe_write.close
+ end
+
+ def reap
+ unless wait_thr.nil?
+ Process.kill("INT", wait_thr.pid) rescue nil
+ begin
+ Timeout.timeout(3) do
+ wait_thr.value # this calls waitpid()
+ end
+ rescue Timeout::Error
+ Process.kill("KILL", wait_thr.pid) rescue nil
+ end
+ stdin.close unless stdin.nil?
+ stdout.close unless stdout.nil?
+ stderr.close unless stderr.nil?
+ inpipe.close unless inpipe.nil?
+ outpipe.close unless outpipe.nil?
+ end
+ end
+
+ def check
+ start if stdin.nil?
+ end
+
+ def compare_versions(version1, version2)
+ query("versioncompare", { "versions" => [version1, version2] }).to_i
+ end
+
+ def install_only_packages(name)
+ query_output = query("installonlypkgs", { "package" => name })
+ if query_output == "False"
+ return false
+ elsif query_output == "True"
+ return true
+ end
+ end
+
+ def options_params(options)
+ options.each_with_object({}) do |opt, h|
+ if opt =~ /--enablerepo=(.+)/
+ $1.split(",").each do |repo|
+ h["enablerepos"] ||= []
+ h["enablerepos"].push(repo)
+ end
+ end
+ if opt =~ /--disablerepo=(.+)/
+ $1.split(",").each do |repo|
+ h["disablerepos"] ||= []
+ h["disablerepos"].push(repo)
+ end
+ end
+ end
+ end
+
+ # @returns Array<Version>
+ def package_query(action, provides, version = nil, arch = nil, options = nil)
+ parameters = { "provides" => provides, "version" => version, "arch" => arch }
+ repo_opts = options_params(options || {})
+ parameters.merge!(repo_opts)
+ query_output = query(action, parameters)
+ version = parse_response(query_output.lines.last)
+ Chef::Log.debug "parsed #{version} from python helper"
+ # XXX: for now we restart after every query with an enablerepo/disablerepo to clean the helper's internal state
+ restart unless repo_opts.empty?
+ version
+ end
+
+ def restart
+ reap
+ start
+ end
+
+ private
+
+ # i couldn't figure out how to decompose an evr on the python side, it seems reasonably
+ # painless to do it in ruby (generally massaging nevras in the ruby side is HIGHLY
+ # discouraged -- this is an "every rule has an exception" exception -- any additional
+ # functionality should probably trigger moving this regexp logic into python)
+ def add_version(hash, version)
+ epoch = nil
+ if version =~ /(\S+):(\S+)/
+ epoch = $1
+ version = $2
+ end
+ if version =~ /(\S+)-(\S+)/
+ version = $1
+ release = $2
+ end
+ hash["epoch"] = epoch unless epoch.nil?
+ hash["release"] = release unless release.nil?
+ hash["version"] = version
+ end
+
+ def query(action, parameters)
+ with_helper do
+ json = build_query(action, parameters)
+ Chef::Log.debug "sending '#{json}' to python helper"
+ outpipe.syswrite json + "\n"
+ output = inpipe.sysread(4096).chomp
+ Chef::Log.debug "got '#{output}' from python helper"
+ return output
+ end
+ end
+
+ def build_query(action, parameters)
+ hash = { "action" => action }
+ parameters.each do |param_name, param_value|
+ hash[param_name] = param_value unless param_value.nil?
+ end
+
+ # Special handling for certain action / param combos
+ if [:whatinstalled, :whatavailable].include?(action)
+ add_version(hash, parameters["version"]) unless parameters["version"].nil?
+ end
+
+ FFI_Yajl::Encoder.encode(hash)
+ end
+
+ def parse_response(output)
+ array = output.split.map { |x| x == "nil" ? nil : x }
+ array.each_slice(3).map { |x| Version.new(*x) }.first
+ end
+
+ def drain_fds
+ output = ""
+ fds, = IO.select([stderr, stdout, inpipe], nil, nil, 0)
+ unless fds.nil?
+ fds.each do |fd|
+ output += fd.sysread(4096) rescue ""
+ end
+ end
+ output
+ rescue => e
+ output
+ end
+
+ def with_helper
+ max_retries ||= 5
+ ret = nil
+ Timeout.timeout(600) do
+ check
+ ret = yield
+ end
+ output = drain_fds
+ unless output.empty?
+ Chef::Log.debug "discarding output on stderr/stdout from python helper: #{output}"
+ end
+ ret
+ rescue EOFError, Errno::EPIPE, Timeout::Error, Errno::ESRCH => e
+ output = drain_fds
+ if ( max_retries -= 1 ) > 0
+ unless output.empty?
+ Chef::Log.debug "discarding output on stderr/stdout from python helper: #{output}"
+ end
+ restart
+ retry
+ else
+ raise e if output.empty?
+ raise "yum-helper.py had stderr/stdout output:\n\n#{output}"
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/chef/provider/package/yum/simplejson/LICENSE.txt b/lib/chef/provider/package/yum/simplejson/LICENSE.txt
new file mode 100644
index 0000000000..e05f49c3fd
--- /dev/null
+++ b/lib/chef/provider/package/yum/simplejson/LICENSE.txt
@@ -0,0 +1,79 @@
+simplejson is dual-licensed software. It is available under the terms
+of the MIT license, or the Academic Free License version 2.1. The full
+text of each license agreement is included below. This code is also
+licensed to the Python Software Foundation (PSF) under a Contributor
+Agreement.
+
+MIT License
+===========
+
+Copyright (c) 2006 Bob Ippolito
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+Academic Free License v. 2.1
+============================
+
+Copyright (c) 2006 Bob Ippolito. All rights reserved.
+
+This Academic Free License (the "License") applies to any original work of authorship (the "Original Work") whose owner (the "Licensor") has placed the following notice immediately following the copyright notice for the Original Work:
+
+Licensed under the Academic Free License version 2.1
+
+1) Grant of Copyright License. Licensor hereby grants You a world-wide, royalty-free, non-exclusive, perpetual, sublicenseable license to do the following:
+
+a) to reproduce the Original Work in copies;
+
+b) to prepare derivative works ("Derivative Works") based upon the Original Work;
+
+c) to distribute copies of the Original Work and Derivative Works to the public;
+
+d) to perform the Original Work publicly; and
+
+e) to display the Original Work publicly.
+
+2) Grant of Patent License. Licensor hereby grants You a world-wide, royalty-free, non-exclusive, perpetual, sublicenseable license, under patent claims owned or controlled by the Licensor that are embodied in the Original Work as furnished by the Licensor, to make, use, sell and offer for sale the Original Work and Derivative Works.
+
+3) Grant of Source Code License. The term "Source Code" means the preferred form of the Original Work for making modifications to it and all available documentation describing how to modify the Original Work. Licensor hereby agrees to provide a machine-readable copy of the Source Code of the Original Work along with each copy of the Original Work that Licensor distributes. Licensor reserves the right to satisfy this obligation by placing a machine-readable copy of the Source Code in an information repository reasonably calculated to permit inexpensive and convenient access by You for as long as Licensor continues to distribute the Original Work, and by publishing the address of that information repository in a notice immediately following the copyright notice that applies to the Original Work.
+
+4) Exclusions From License Grant. Neither the names of Licensor, nor the names of any contributors to the Original Work, nor any of their trademarks or service marks, may be used to endorse or promote products derived from this Original Work without express prior written permission of the Licensor. Nothing in this License shall be deemed to grant any rights to trademarks, copyrights, patents, trade secrets or any other intellectual property of Licensor except as expressly stated herein. No patent license is granted to make, use, sell or offer to sell embodiments of any patent claims other than the licensed claims defined in Section 2. No right is granted to the trademarks of Licensor even if such marks are included in the Original Work. Nothing in this License shall be interpreted to prohibit Licensor from licensing under different terms from this License any Original Work that Licensor otherwise would have a right to license.
+
+5) This section intentionally omitted.
+
+6) Attribution Rights. You must retain, in the Source Code of any Derivative Works that You create, all copyright, patent or trademark notices from the Source Code of the Original Work, as well as any notices of licensing and any descriptive text identified therein as an "Attribution Notice." You must cause the Source Code for any Derivative Works that You create to carry a prominent Attribution Notice reasonably calculated to inform recipients that You have modified the Original Work.
+
+7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that the copyright in and to the Original Work and the patent rights granted herein by Licensor are owned by the Licensor or are sublicensed to You under the terms of this License with the permission of the contributor(s) of those copyrights and patent rights. Except as expressly stated in the immediately proceeding sentence, the Original Work is provided under this License on an "AS IS" BASIS and WITHOUT WARRANTY, either express or implied, including, without limitation, the warranties of NON-INFRINGEMENT, MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH YOU. This DISCLAIMER OF WARRANTY constitutes an essential part of this License. No license to Original Work is granted hereunder except under this disclaimer.
+
+8) Limitation of Liability. Under no circumstances and under no legal theory, whether in tort (including negligence), contract, or otherwise, shall the Licensor be liable to any person for any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or the use of the Original Work including, without limitation, damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses. This limitation of liability shall not apply to liability for death or personal injury resulting from Licensor's negligence to the extent applicable law prohibits such limitation. Some jurisdictions do not allow the exclusion or limitation of incidental or consequential damages, so this exclusion and limitation may not apply to You.
+
+9) Acceptance and Termination. If You distribute copies of the Original Work or a Derivative Work, You must make a reasonable effort under the circumstances to obtain the express assent of recipients to the terms of this License. Nothing else but this License (or another written agreement between Licensor and You) grants You permission to create Derivative Works based upon the Original Work or to exercise any of the rights granted in Section 1 herein, and any attempt to do so except under the terms of this License (or another written agreement between Licensor and You) is expressly prohibited by U.S. copyright law, the equivalent laws of other countries, and by international treaty. Therefore, by exercising any of the rights granted to You in Section 1 herein, You indicate Your acceptance of this License and all of its terms and conditions.
+
+10) Termination for Patent Action. This License shall terminate automatically and You may no longer exercise any of the rights granted to You by this License as of the date You commence an action, including a cross-claim or counterclaim, against Licensor or any licensee alleging that the Original Work infringes a patent. This termination provision shall not apply for an action alleging patent infringement by combinations of the Original Work with other software or hardware.
+
+11) Jurisdiction, Venue and Governing Law. Any action or suit relating to this License may be brought only in the courts of a jurisdiction wherein the Licensor resides or in which Licensor conducts its primary business, and under the laws of that jurisdiction excluding its conflict-of-law provisions. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any use of the Original Work outside the scope of this License or after its termination shall be subject to the requirements and penalties of the U.S. Copyright Act, 17 U.S.C. § 101 et seq., the equivalent laws of other countries, and international treaty. This section shall survive the termination of this License.
+
+12) Attorneys Fees. In any action to enforce the terms of this License or seeking damages relating thereto, the prevailing party shall be entitled to recover its costs and expenses, including, without limitation, reasonable attorneys' fees and costs incurred in connection with such action, including any appeal of such action. This section shall survive the termination of this License.
+
+13) Miscellaneous. This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable.
+
+14) Definition of "You" in This License. "You" throughout this License, whether in upper or lower case, means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License. For legal entities, "You" includes any entity that controls, is controlled by, or is under common control with you. For purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+15) Right to Use. You may use the Original Work in all ways not otherwise restricted or conditioned by this License or by law, and Licensor promises not to interfere with or be responsible for such uses by You.
+
+This license is Copyright (C) 2003-2004 Lawrence E. Rosen. All rights reserved. Permission is hereby granted to copy and distribute this license without modification. This license may not be modified without the express written permission of its copyright owner.
diff --git a/lib/chef/provider/package/yum/simplejson/__init__.py b/lib/chef/provider/package/yum/simplejson/__init__.py
new file mode 100644
index 0000000000..d5b4d39913
--- /dev/null
+++ b/lib/chef/provider/package/yum/simplejson/__init__.py
@@ -0,0 +1,318 @@
+r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
+JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
+interchange format.
+
+:mod:`simplejson` exposes an API familiar to users of the standard library
+:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
+version of the :mod:`json` library contained in Python 2.6, but maintains
+compatibility with Python 2.4 and Python 2.5 and (currently) has
+significant performance advantages, even without using the optional C
+extension for speedups.
+
+Encoding basic Python object hierarchies::
+
+ >>> import simplejson as json
+ >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
+ '["foo", {"bar": ["baz", null, 1.0, 2]}]'
+ >>> print json.dumps("\"foo\bar")
+ "\"foo\bar"
+ >>> print json.dumps(u'\u1234')
+ "\u1234"
+ >>> print json.dumps('\\')
+ "\\"
+ >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
+ {"a": 0, "b": 0, "c": 0}
+ >>> from StringIO import StringIO
+ >>> io = StringIO()
+ >>> json.dump(['streaming API'], io)
+ >>> io.getvalue()
+ '["streaming API"]'
+
+Compact encoding::
+
+ >>> import simplejson as json
+ >>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
+ '[1,2,3,{"4":5,"6":7}]'
+
+Pretty printing::
+
+ >>> import simplejson as json
+ >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
+ >>> print '\n'.join([l.rstrip() for l in s.splitlines()])
+ {
+ "4": 5,
+ "6": 7
+ }
+
+Decoding JSON::
+
+ >>> import simplejson as json
+ >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
+ >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
+ True
+ >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
+ True
+ >>> from StringIO import StringIO
+ >>> io = StringIO('["streaming API"]')
+ >>> json.load(io)[0] == 'streaming API'
+ True
+
+Specializing JSON object decoding::
+
+ >>> import simplejson as json
+ >>> def as_complex(dct):
+ ... if '__complex__' in dct:
+ ... return complex(dct['real'], dct['imag'])
+ ... return dct
+ ...
+ >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
+ ... object_hook=as_complex)
+ (1+2j)
+ >>> import decimal
+ >>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
+ True
+
+Specializing JSON object encoding::
+
+ >>> import simplejson as json
+ >>> def encode_complex(obj):
+ ... if isinstance(obj, complex):
+ ... return [obj.real, obj.imag]
+ ... raise TypeError(repr(o) + " is not JSON serializable")
+ ...
+ >>> json.dumps(2 + 1j, default=encode_complex)
+ '[2.0, 1.0]'
+ >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
+ '[2.0, 1.0]'
+ >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
+ '[2.0, 1.0]'
+
+
+Using simplejson.tool from the shell to validate and pretty-print::
+
+ $ echo '{"json":"obj"}' | python -m simplejson.tool
+ {
+ "json": "obj"
+ }
+ $ echo '{ 1.2:3.4}' | python -m simplejson.tool
+ Expecting property name: line 1 column 2 (char 2)
+"""
+__version__ = '2.0.9'
+__all__ = [
+ 'dump', 'dumps', 'load', 'loads',
+ 'JSONDecoder', 'JSONEncoder',
+]
+
+__author__ = 'Bob Ippolito <bob@redivi.com>'
+
+from decoder import JSONDecoder
+from encoder import JSONEncoder
+
+_default_encoder = JSONEncoder(
+ skipkeys=False,
+ ensure_ascii=True,
+ check_circular=True,
+ allow_nan=True,
+ indent=None,
+ separators=None,
+ encoding='utf-8',
+ default=None,
+)
+
+def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
+ allow_nan=True, cls=None, indent=None, separators=None,
+ encoding='utf-8', default=None, **kw):
+ """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
+ ``.write()``-supporting file-like object).
+
+ If ``skipkeys`` is true then ``dict`` keys that are not basic types
+ (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
+ will be skipped instead of raising a ``TypeError``.
+
+ If ``ensure_ascii`` is false, then the some chunks written to ``fp``
+ may be ``unicode`` instances, subject to normal Python ``str`` to
+ ``unicode`` coercion rules. Unless ``fp.write()`` explicitly
+ understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
+ to cause an error.
+
+ If ``check_circular`` is false, then the circular reference check
+ for container types will be skipped and a circular reference will
+ result in an ``OverflowError`` (or worse).
+
+ If ``allow_nan`` is false, then it will be a ``ValueError`` to
+ serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
+ in strict compliance of the JSON specification, instead of using the
+ JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+
+ If ``indent`` is a non-negative integer, then JSON array elements and object
+ members will be pretty-printed with that indent level. An indent level
+ of 0 will only insert newlines. ``None`` is the most compact representation.
+
+ If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+ then it will be used instead of the default ``(', ', ': ')`` separators.
+ ``(',', ':')`` is the most compact JSON representation.
+
+ ``encoding`` is the character encoding for str instances, default is UTF-8.
+
+ ``default(obj)`` is a function that should return a serializable version
+ of obj or raise TypeError. The default simply raises TypeError.
+
+ To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
+ ``.default()`` method to serialize additional types), specify it with
+ the ``cls`` kwarg.
+
+ """
+ # cached encoder
+ if (not skipkeys and ensure_ascii and
+ check_circular and allow_nan and
+ cls is None and indent is None and separators is None and
+ encoding == 'utf-8' and default is None and not kw):
+ iterable = _default_encoder.iterencode(obj)
+ else:
+ if cls is None:
+ cls = JSONEncoder
+ iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+ check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+ separators=separators, encoding=encoding,
+ default=default, **kw).iterencode(obj)
+ # could accelerate with writelines in some versions of Python, at
+ # a debuggability cost
+ for chunk in iterable:
+ fp.write(chunk)
+
+
+def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
+ allow_nan=True, cls=None, indent=None, separators=None,
+ encoding='utf-8', default=None, **kw):
+ """Serialize ``obj`` to a JSON formatted ``str``.
+
+ If ``skipkeys`` is false then ``dict`` keys that are not basic types
+ (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
+ will be skipped instead of raising a ``TypeError``.
+
+ If ``ensure_ascii`` is false, then the return value will be a
+ ``unicode`` instance subject to normal Python ``str`` to ``unicode``
+ coercion rules instead of being escaped to an ASCII ``str``.
+
+ If ``check_circular`` is false, then the circular reference check
+ for container types will be skipped and a circular reference will
+ result in an ``OverflowError`` (or worse).
+
+ If ``allow_nan`` is false, then it will be a ``ValueError`` to
+ serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
+ strict compliance of the JSON specification, instead of using the
+ JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+
+ If ``indent`` is a non-negative integer, then JSON array elements and
+ object members will be pretty-printed with that indent level. An indent
+ level of 0 will only insert newlines. ``None`` is the most compact
+ representation.
+
+ If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+ then it will be used instead of the default ``(', ', ': ')`` separators.
+ ``(',', ':')`` is the most compact JSON representation.
+
+ ``encoding`` is the character encoding for str instances, default is UTF-8.
+
+ ``default(obj)`` is a function that should return a serializable version
+ of obj or raise TypeError. The default simply raises TypeError.
+
+ To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
+ ``.default()`` method to serialize additional types), specify it with
+ the ``cls`` kwarg.
+
+ """
+ # cached encoder
+ if (not skipkeys and ensure_ascii and
+ check_circular and allow_nan and
+ cls is None and indent is None and separators is None and
+ encoding == 'utf-8' and default is None and not kw):
+ return _default_encoder.encode(obj)
+ if cls is None:
+ cls = JSONEncoder
+ return cls(
+ skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+ check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+ separators=separators, encoding=encoding, default=default,
+ **kw).encode(obj)
+
+
+_default_decoder = JSONDecoder(encoding=None, object_hook=None)
+
+
+def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
+ parse_int=None, parse_constant=None, **kw):
+ """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
+ a JSON document) to a Python object.
+
+ If the contents of ``fp`` is encoded with an ASCII based encoding other
+ than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
+ be specified. Encodings that are not ASCII based (such as UCS-2) are
+ not allowed, and should be wrapped with
+ ``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode``
+ object and passed to ``loads()``
+
+ ``object_hook`` is an optional function that will be called with the
+ result of any object literal decode (a ``dict``). The return value of
+ ``object_hook`` will be used instead of the ``dict``. This feature
+ can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+
+ To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
+ kwarg.
+
+ """
+ return loads(fp.read(),
+ encoding=encoding, cls=cls, object_hook=object_hook,
+ parse_float=parse_float, parse_int=parse_int,
+ parse_constant=parse_constant, **kw)
+
+
+def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
+ parse_int=None, parse_constant=None, **kw):
+ """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
+ document) to a Python object.
+
+ If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
+ other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
+ must be specified. Encodings that are not ASCII based (such as UCS-2)
+ are not allowed and should be decoded to ``unicode`` first.
+
+ ``object_hook`` is an optional function that will be called with the
+ result of any object literal decode (a ``dict``). The return value of
+ ``object_hook`` will be used instead of the ``dict``. This feature
+ can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+
+ ``parse_float``, if specified, will be called with the string
+ of every JSON float to be decoded. By default this is equivalent to
+ float(num_str). This can be used to use another datatype or parser
+ for JSON floats (e.g. decimal.Decimal).
+
+ ``parse_int``, if specified, will be called with the string
+ of every JSON int to be decoded. By default this is equivalent to
+ int(num_str). This can be used to use another datatype or parser
+ for JSON integers (e.g. float).
+
+ ``parse_constant``, if specified, will be called with one of the
+ following strings: -Infinity, Infinity, NaN, null, true, false.
+ This can be used to raise an exception if invalid JSON numbers
+ are encountered.
+
+ To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
+ kwarg.
+
+ """
+ if (cls is None and encoding is None and object_hook is None and
+ parse_int is None and parse_float is None and
+ parse_constant is None and not kw):
+ return _default_decoder.decode(s)
+ if cls is None:
+ cls = JSONDecoder
+ if object_hook is not None:
+ kw['object_hook'] = object_hook
+ if parse_float is not None:
+ kw['parse_float'] = parse_float
+ if parse_int is not None:
+ kw['parse_int'] = parse_int
+ if parse_constant is not None:
+ kw['parse_constant'] = parse_constant
+ return cls(encoding=encoding, **kw).decode(s)
diff --git a/lib/chef/provider/package/yum/simplejson/__init__.pyc b/lib/chef/provider/package/yum/simplejson/__init__.pyc
new file mode 100644
index 0000000000..10679d3b04
--- /dev/null
+++ b/lib/chef/provider/package/yum/simplejson/__init__.pyc
Binary files differ
diff --git a/lib/chef/provider/package/yum/simplejson/decoder.py b/lib/chef/provider/package/yum/simplejson/decoder.py
new file mode 100644
index 0000000000..d921ce0b97
--- /dev/null
+++ b/lib/chef/provider/package/yum/simplejson/decoder.py
@@ -0,0 +1,354 @@
+"""Implementation of JSONDecoder
+"""
+import re
+import sys
+import struct
+
+from simplejson.scanner import make_scanner
+try:
+ from simplejson._speedups import scanstring as c_scanstring
+except ImportError:
+ c_scanstring = None
+
+__all__ = ['JSONDecoder']
+
+FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
+
+def _floatconstants():
+ _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
+ if sys.byteorder != 'big':
+ _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
+ nan, inf = struct.unpack('dd', _BYTES)
+ return nan, inf, -inf
+
+NaN, PosInf, NegInf = _floatconstants()
+
+
+def linecol(doc, pos):
+ lineno = doc.count('\n', 0, pos) + 1
+ if lineno == 1:
+ colno = pos
+ else:
+ colno = pos - doc.rindex('\n', 0, pos)
+ return lineno, colno
+
+
+def errmsg(msg, doc, pos, end=None):
+ # Note that this function is called from _speedups
+ lineno, colno = linecol(doc, pos)
+ if end is None:
+ #fmt = '{0}: line {1} column {2} (char {3})'
+ #return fmt.format(msg, lineno, colno, pos)
+ fmt = '%s: line %d column %d (char %d)'
+ return fmt % (msg, lineno, colno, pos)
+ endlineno, endcolno = linecol(doc, end)
+ #fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
+ #return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
+ fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
+ return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
+
+
+_CONSTANTS = {
+ '-Infinity': NegInf,
+ 'Infinity': PosInf,
+ 'NaN': NaN,
+}
+
+STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
+BACKSLASH = {
+ '"': u'"', '\\': u'\\', '/': u'/',
+ 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
+}
+
+DEFAULT_ENCODING = "utf-8"
+
+def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
+ """Scan the string s for a JSON string. End is the index of the
+ character in s after the quote that started the JSON string.
+ Unescapes all valid JSON string escape sequences and raises ValueError
+ on attempt to decode an invalid string. If strict is False then literal
+ control characters are allowed in the string.
+
+ Returns a tuple of the decoded string and the index of the character in s
+ after the end quote."""
+ if encoding is None:
+ encoding = DEFAULT_ENCODING
+ chunks = []
+ _append = chunks.append
+ begin = end - 1
+ while 1:
+ chunk = _m(s, end)
+ if chunk is None:
+ raise ValueError(
+ errmsg("Unterminated string starting at", s, begin))
+ end = chunk.end()
+ content, terminator = chunk.groups()
+        # Content contains zero or more unescaped string characters
+ if content:
+ if not isinstance(content, unicode):
+ content = unicode(content, encoding)
+ _append(content)
+ # Terminator is the end of string, a literal control character,
+ # or a backslash denoting that an escape sequence follows
+ if terminator == '"':
+ break
+ elif terminator != '\\':
+ if strict:
+ msg = "Invalid control character %r at" % (terminator,)
+ #msg = "Invalid control character {0!r} at".format(terminator)
+ raise ValueError(errmsg(msg, s, end))
+ else:
+ _append(terminator)
+ continue
+ try:
+ esc = s[end]
+ except IndexError:
+ raise ValueError(
+ errmsg("Unterminated string starting at", s, begin))
+ # If not a unicode escape sequence, must be in the lookup table
+ if esc != 'u':
+ try:
+ char = _b[esc]
+ except KeyError:
+ msg = "Invalid \\escape: " + repr(esc)
+ raise ValueError(errmsg(msg, s, end))
+ end += 1
+ else:
+ # Unicode escape sequence
+ esc = s[end + 1:end + 5]
+ next_end = end + 5
+ if len(esc) != 4:
+ msg = "Invalid \\uXXXX escape"
+ raise ValueError(errmsg(msg, s, end))
+ uni = int(esc, 16)
+ # Check for surrogate pair on UCS-4 systems
+ if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
+ msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
+ if not s[end + 5:end + 7] == '\\u':
+ raise ValueError(errmsg(msg, s, end))
+ esc2 = s[end + 7:end + 11]
+ if len(esc2) != 4:
+ raise ValueError(errmsg(msg, s, end))
+ uni2 = int(esc2, 16)
+ uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
+ next_end += 6
+ char = unichr(uni)
+ end = next_end
+ # Append the unescaped character
+ _append(char)
+ return u''.join(chunks), end
+
+
+# Use speedup if available
+scanstring = c_scanstring or py_scanstring
+
+WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
+WHITESPACE_STR = ' \t\n\r'
+
+def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
+ pairs = {}
+ # Use a slice to prevent IndexError from being raised, the following
+ # check will raise a more specific ValueError if the string is empty
+ nextchar = s[end:end + 1]
+ # Normally we expect nextchar == '"'
+ if nextchar != '"':
+ if nextchar in _ws:
+ end = _w(s, end).end()
+ nextchar = s[end:end + 1]
+ # Trivial empty object
+ if nextchar == '}':
+ return pairs, end + 1
+ elif nextchar != '"':
+ raise ValueError(errmsg("Expecting property name", s, end))
+ end += 1
+ while True:
+ key, end = scanstring(s, end, encoding, strict)
+
+ # To skip some function call overhead we optimize the fast paths where
+ # the JSON key separator is ": " or just ":".
+ if s[end:end + 1] != ':':
+ end = _w(s, end).end()
+ if s[end:end + 1] != ':':
+ raise ValueError(errmsg("Expecting : delimiter", s, end))
+
+ end += 1
+
+ try:
+ if s[end] in _ws:
+ end += 1
+ if s[end] in _ws:
+ end = _w(s, end + 1).end()
+ except IndexError:
+ pass
+
+ try:
+ value, end = scan_once(s, end)
+ except StopIteration:
+ raise ValueError(errmsg("Expecting object", s, end))
+ pairs[key] = value
+
+ try:
+ nextchar = s[end]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end]
+ except IndexError:
+ nextchar = ''
+ end += 1
+
+ if nextchar == '}':
+ break
+ elif nextchar != ',':
+ raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
+
+ try:
+ nextchar = s[end]
+ if nextchar in _ws:
+ end += 1
+ nextchar = s[end]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end]
+ except IndexError:
+ nextchar = ''
+
+ end += 1
+ if nextchar != '"':
+ raise ValueError(errmsg("Expecting property name", s, end - 1))
+
+ if object_hook is not None:
+ pairs = object_hook(pairs)
+ return pairs, end
+
+def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
+ values = []
+ nextchar = s[end:end + 1]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end:end + 1]
+ # Look-ahead for trivial empty array
+ if nextchar == ']':
+ return values, end + 1
+ _append = values.append
+ while True:
+ try:
+ value, end = scan_once(s, end)
+ except StopIteration:
+ raise ValueError(errmsg("Expecting object", s, end))
+ _append(value)
+ nextchar = s[end:end + 1]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end:end + 1]
+ end += 1
+ if nextchar == ']':
+ break
+ elif nextchar != ',':
+ raise ValueError(errmsg("Expecting , delimiter", s, end))
+
+ try:
+ if s[end] in _ws:
+ end += 1
+ if s[end] in _ws:
+ end = _w(s, end + 1).end()
+ except IndexError:
+ pass
+
+ return values, end
+
+class JSONDecoder(object):
+ """Simple JSON <http://json.org> decoder
+
+ Performs the following translations in decoding by default:
+
+ +---------------+-------------------+
+ | JSON | Python |
+ +===============+===================+
+ | object | dict |
+ +---------------+-------------------+
+ | array | list |
+ +---------------+-------------------+
+ | string | unicode |
+ +---------------+-------------------+
+ | number (int) | int, long |
+ +---------------+-------------------+
+ | number (real) | float |
+ +---------------+-------------------+
+ | true | True |
+ +---------------+-------------------+
+ | false | False |
+ +---------------+-------------------+
+ | null | None |
+ +---------------+-------------------+
+
+ It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
+ their corresponding ``float`` values, which is outside the JSON spec.
+
+ """
+
+ def __init__(self, encoding=None, object_hook=None, parse_float=None,
+ parse_int=None, parse_constant=None, strict=True):
+ """``encoding`` determines the encoding used to interpret any ``str``
+ objects decoded by this instance (utf-8 by default). It has no
+ effect when decoding ``unicode`` objects.
+
+ Note that currently only encodings that are a superset of ASCII work,
+ strings of other encodings should be passed in as ``unicode``.
+
+ ``object_hook``, if specified, will be called with the result
+ of every JSON object decoded and its return value will be used in
+ place of the given ``dict``. This can be used to provide custom
+ deserializations (e.g. to support JSON-RPC class hinting).
+
+ ``parse_float``, if specified, will be called with the string
+ of every JSON float to be decoded. By default this is equivalent to
+ float(num_str). This can be used to use another datatype or parser
+ for JSON floats (e.g. decimal.Decimal).
+
+ ``parse_int``, if specified, will be called with the string
+ of every JSON int to be decoded. By default this is equivalent to
+ int(num_str). This can be used to use another datatype or parser
+ for JSON integers (e.g. float).
+
+ ``parse_constant``, if specified, will be called with one of the
+ following strings: -Infinity, Infinity, NaN.
+ This can be used to raise an exception if invalid JSON numbers
+ are encountered.
+
+ """
+ self.encoding = encoding
+ self.object_hook = object_hook
+ self.parse_float = parse_float or float
+ self.parse_int = parse_int or int
+ self.parse_constant = parse_constant or _CONSTANTS.__getitem__
+ self.strict = strict
+ self.parse_object = JSONObject
+ self.parse_array = JSONArray
+ self.parse_string = scanstring
+ self.scan_once = make_scanner(self)
+
+ def decode(self, s, _w=WHITESPACE.match):
+ """Return the Python representation of ``s`` (a ``str`` or ``unicode``
+ instance containing a JSON document)
+
+ """
+ obj, end = self.raw_decode(s, idx=_w(s, 0).end())
+ end = _w(s, end).end()
+ if end != len(s):
+ raise ValueError(errmsg("Extra data", s, end, len(s)))
+ return obj
+
+ def raw_decode(self, s, idx=0):
+ """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
+ with a JSON document) and return a 2-tuple of the Python
+ representation and the index in ``s`` where the document ended.
+
+ This can be used to decode a JSON document from a string that may
+ have extraneous data at the end.
+
+ """
+ try:
+ obj, end = self.scan_once(s, idx)
+ except StopIteration:
+ raise ValueError("No JSON object could be decoded")
+ return obj, end
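
For orientation, a minimal sketch of exercising the vendored decoder above (Python 2, assuming the bundled ``simplejson`` package is importable; the sample documents are illustrative and not part of the commit):

    from simplejson.decoder import JSONDecoder

    d = JSONDecoder()
    # decode() requires the whole string to be exactly one JSON document
    d.decode('{"name": "zlib", "epoch": 0}')          # {u'name': u'zlib', u'epoch': 0}
    # raw_decode() tolerates trailing data and also returns the end index
    d.raw_decode('{"a": 1} trailing data', idx=0)     # ({u'a': 1}, 8)
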
diff --git a/lib/chef/provider/package/yum/simplejson/decoder.pyc b/lib/chef/provider/package/yum/simplejson/decoder.pyc
new file mode 100644
index 0000000000..d402901870
--- /dev/null
+++ b/lib/chef/provider/package/yum/simplejson/decoder.pyc
Binary files differ
diff --git a/lib/chef/provider/package/yum/simplejson/encoder.py b/lib/chef/provider/package/yum/simplejson/encoder.py
new file mode 100644
index 0000000000..cf58290366
--- /dev/null
+++ b/lib/chef/provider/package/yum/simplejson/encoder.py
@@ -0,0 +1,440 @@
+"""Implementation of JSONEncoder
+"""
+import re
+
+try:
+ from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
+except ImportError:
+ c_encode_basestring_ascii = None
+try:
+ from simplejson._speedups import make_encoder as c_make_encoder
+except ImportError:
+ c_make_encoder = None
+
+ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
+ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
+HAS_UTF8 = re.compile(r'[\x80-\xff]')
+ESCAPE_DCT = {
+ '\\': '\\\\',
+ '"': '\\"',
+ '\b': '\\b',
+ '\f': '\\f',
+ '\n': '\\n',
+ '\r': '\\r',
+ '\t': '\\t',
+}
+for i in range(0x20):
+ #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
+ ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
+
+# Assume this produces an infinity on all machines (probably not guaranteed)
+INFINITY = float('1e66666')
+FLOAT_REPR = repr
+
+def encode_basestring(s):
+ """Return a JSON representation of a Python string
+
+ """
+ def replace(match):
+ return ESCAPE_DCT[match.group(0)]
+ return '"' + ESCAPE.sub(replace, s) + '"'
+
+
+def py_encode_basestring_ascii(s):
+ """Return an ASCII-only JSON representation of a Python string
+
+ """
+ if isinstance(s, str) and HAS_UTF8.search(s) is not None:
+ s = s.decode('utf-8')
+ def replace(match):
+ s = match.group(0)
+ try:
+ return ESCAPE_DCT[s]
+ except KeyError:
+ n = ord(s)
+ if n < 0x10000:
+ #return '\\u{0:04x}'.format(n)
+ return '\\u%04x' % (n,)
+ else:
+ # surrogate pair
+ n -= 0x10000
+ s1 = 0xd800 | ((n >> 10) & 0x3ff)
+ s2 = 0xdc00 | (n & 0x3ff)
+ #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
+ return '\\u%04x\\u%04x' % (s1, s2)
+ return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
+
+
+encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
+
+class JSONEncoder(object):
+ """Extensible JSON <http://json.org> encoder for Python data structures.
+
+ Supports the following objects and types by default:
+
+ +-------------------+---------------+
+ | Python | JSON |
+ +===================+===============+
+ | dict | object |
+ +-------------------+---------------+
+ | list, tuple | array |
+ +-------------------+---------------+
+ | str, unicode | string |
+ +-------------------+---------------+
+ | int, long, float | number |
+ +-------------------+---------------+
+ | True | true |
+ +-------------------+---------------+
+ | False | false |
+ +-------------------+---------------+
+ | None | null |
+ +-------------------+---------------+
+
+    To extend this to recognize other objects, subclass and implement a
+    ``.default()`` method that returns a serializable object for ``o`` if
+    possible; otherwise it should call the superclass implementation (to
+    raise ``TypeError``).
+
+ """
+ item_separator = ', '
+ key_separator = ': '
+ def __init__(self, skipkeys=False, ensure_ascii=True,
+ check_circular=True, allow_nan=True, sort_keys=False,
+ indent=None, separators=None, encoding='utf-8', default=None):
+ """Constructor for JSONEncoder, with sensible defaults.
+
+ If skipkeys is false, then it is a TypeError to attempt
+ encoding of keys that are not str, int, long, float or None. If
+ skipkeys is True, such items are simply skipped.
+
+ If ensure_ascii is true, the output is guaranteed to be str
+ objects with all incoming unicode characters escaped. If
+        ensure_ascii is false, the output will be a unicode object.
+
+ If check_circular is true, then lists, dicts, and custom encoded
+ objects will be checked for circular references during encoding to
+ prevent an infinite recursion (which would cause an OverflowError).
+ Otherwise, no such check takes place.
+
+ If allow_nan is true, then NaN, Infinity, and -Infinity will be
+ encoded as such. This behavior is not JSON specification compliant,
+ but is consistent with most JavaScript based encoders and decoders.
+ Otherwise, it will be a ValueError to encode such floats.
+
+ If sort_keys is true, then the output of dictionaries will be
+ sorted by key; this is useful for regression tests to ensure
+ that JSON serializations can be compared on a day-to-day basis.
+
+ If indent is a non-negative integer, then JSON array
+ elements and object members will be pretty-printed with that
+ indent level. An indent level of 0 will only insert newlines.
+ None is the most compact representation.
+
+ If specified, separators should be a (item_separator, key_separator)
+ tuple. The default is (', ', ': '). To get the most compact JSON
+ representation you should specify (',', ':') to eliminate whitespace.
+
+ If specified, default is a function that gets called for objects
+ that can't otherwise be serialized. It should return a JSON encodable
+ version of the object or raise a ``TypeError``.
+
+ If encoding is not None, then all input strings will be
+ transformed into unicode using that encoding prior to JSON-encoding.
+ The default is UTF-8.
+
+ """
+
+ self.skipkeys = skipkeys
+ self.ensure_ascii = ensure_ascii
+ self.check_circular = check_circular
+ self.allow_nan = allow_nan
+ self.sort_keys = sort_keys
+ self.indent = indent
+ if separators is not None:
+ self.item_separator, self.key_separator = separators
+ if default is not None:
+ self.default = default
+ self.encoding = encoding
+
+ def default(self, o):
+ """Implement this method in a subclass such that it returns
+ a serializable object for ``o``, or calls the base implementation
+ (to raise a ``TypeError``).
+
+ For example, to support arbitrary iterators, you could
+ implement default like this::
+
+ def default(self, o):
+ try:
+ iterable = iter(o)
+ except TypeError:
+ pass
+ else:
+ return list(iterable)
+ return JSONEncoder.default(self, o)
+
+ """
+ raise TypeError(repr(o) + " is not JSON serializable")
+
+ def encode(self, o):
+ """Return a JSON string representation of a Python data structure.
+
+ >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
+ '{"foo": ["bar", "baz"]}'
+
+ """
+ # This is for extremely simple cases and benchmarks.
+ if isinstance(o, basestring):
+ if isinstance(o, str):
+ _encoding = self.encoding
+ if (_encoding is not None
+ and not (_encoding == 'utf-8')):
+ o = o.decode(_encoding)
+ if self.ensure_ascii:
+ return encode_basestring_ascii(o)
+ else:
+ return encode_basestring(o)
+ # This doesn't pass the iterator directly to ''.join() because the
+ # exceptions aren't as detailed. The list call should be roughly
+ # equivalent to the PySequence_Fast that ''.join() would do.
+ chunks = self.iterencode(o, _one_shot=True)
+ if not isinstance(chunks, (list, tuple)):
+ chunks = list(chunks)
+ return ''.join(chunks)
+
+ def iterencode(self, o, _one_shot=False):
+ """Encode the given object and yield each string
+ representation as available.
+
+ For example::
+
+ for chunk in JSONEncoder().iterencode(bigobject):
+ mysocket.write(chunk)
+
+ """
+ if self.check_circular:
+ markers = {}
+ else:
+ markers = None
+ if self.ensure_ascii:
+ _encoder = encode_basestring_ascii
+ else:
+ _encoder = encode_basestring
+ if self.encoding != 'utf-8':
+ def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
+ if isinstance(o, str):
+ o = o.decode(_encoding)
+ return _orig_encoder(o)
+
+ def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
+ # Check for specials. Note that this type of test is processor- and/or
+ # platform-specific, so do tests which don't depend on the internals.
+
+ if o != o:
+ text = 'NaN'
+ elif o == _inf:
+ text = 'Infinity'
+ elif o == _neginf:
+ text = '-Infinity'
+ else:
+ return _repr(o)
+
+ if not allow_nan:
+ raise ValueError(
+ "Out of range float values are not JSON compliant: " +
+ repr(o))
+
+ return text
+
+
+ if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
+ _iterencode = c_make_encoder(
+ markers, self.default, _encoder, self.indent,
+ self.key_separator, self.item_separator, self.sort_keys,
+ self.skipkeys, self.allow_nan)
+ else:
+ _iterencode = _make_iterencode(
+ markers, self.default, _encoder, self.indent, floatstr,
+ self.key_separator, self.item_separator, self.sort_keys,
+ self.skipkeys, _one_shot)
+ return _iterencode(o, 0)
+
+def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
+ ## HACK: hand-optimized bytecode; turn globals into locals
+ False=False,
+ True=True,
+ ValueError=ValueError,
+ basestring=basestring,
+ dict=dict,
+ float=float,
+ id=id,
+ int=int,
+ isinstance=isinstance,
+ list=list,
+ long=long,
+ str=str,
+ tuple=tuple,
+ ):
+
+ def _iterencode_list(lst, _current_indent_level):
+ if not lst:
+ yield '[]'
+ return
+ if markers is not None:
+ markerid = id(lst)
+ if markerid in markers:
+ raise ValueError("Circular reference detected")
+ markers[markerid] = lst
+ buf = '['
+ if _indent is not None:
+ _current_indent_level += 1
+ newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
+ separator = _item_separator + newline_indent
+ buf += newline_indent
+ else:
+ newline_indent = None
+ separator = _item_separator
+ first = True
+ for value in lst:
+ if first:
+ first = False
+ else:
+ buf = separator
+ if isinstance(value, basestring):
+ yield buf + _encoder(value)
+ elif value is None:
+ yield buf + 'null'
+ elif value is True:
+ yield buf + 'true'
+ elif value is False:
+ yield buf + 'false'
+ elif isinstance(value, (int, long)):
+ yield buf + str(value)
+ elif isinstance(value, float):
+ yield buf + _floatstr(value)
+ else:
+ yield buf
+ if isinstance(value, (list, tuple)):
+ chunks = _iterencode_list(value, _current_indent_level)
+ elif isinstance(value, dict):
+ chunks = _iterencode_dict(value, _current_indent_level)
+ else:
+ chunks = _iterencode(value, _current_indent_level)
+ for chunk in chunks:
+ yield chunk
+ if newline_indent is not None:
+ _current_indent_level -= 1
+ yield '\n' + (' ' * (_indent * _current_indent_level))
+ yield ']'
+ if markers is not None:
+ del markers[markerid]
+
+ def _iterencode_dict(dct, _current_indent_level):
+ if not dct:
+ yield '{}'
+ return
+ if markers is not None:
+ markerid = id(dct)
+ if markerid in markers:
+ raise ValueError("Circular reference detected")
+ markers[markerid] = dct
+ yield '{'
+ if _indent is not None:
+ _current_indent_level += 1
+ newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
+ item_separator = _item_separator + newline_indent
+ yield newline_indent
+ else:
+ newline_indent = None
+ item_separator = _item_separator
+ first = True
+ if _sort_keys:
+ items = dct.items()
+ items.sort(key=lambda kv: kv[0])
+ else:
+ items = dct.iteritems()
+ for key, value in items:
+ if isinstance(key, basestring):
+ pass
+ # JavaScript is weakly typed for these, so it makes sense to
+ # also allow them. Many encoders seem to do something like this.
+ elif isinstance(key, float):
+ key = _floatstr(key)
+ elif key is True:
+ key = 'true'
+ elif key is False:
+ key = 'false'
+ elif key is None:
+ key = 'null'
+ elif isinstance(key, (int, long)):
+ key = str(key)
+ elif _skipkeys:
+ continue
+ else:
+ raise TypeError("key " + repr(key) + " is not a string")
+ if first:
+ first = False
+ else:
+ yield item_separator
+ yield _encoder(key)
+ yield _key_separator
+ if isinstance(value, basestring):
+ yield _encoder(value)
+ elif value is None:
+ yield 'null'
+ elif value is True:
+ yield 'true'
+ elif value is False:
+ yield 'false'
+ elif isinstance(value, (int, long)):
+ yield str(value)
+ elif isinstance(value, float):
+ yield _floatstr(value)
+ else:
+ if isinstance(value, (list, tuple)):
+ chunks = _iterencode_list(value, _current_indent_level)
+ elif isinstance(value, dict):
+ chunks = _iterencode_dict(value, _current_indent_level)
+ else:
+ chunks = _iterencode(value, _current_indent_level)
+ for chunk in chunks:
+ yield chunk
+ if newline_indent is not None:
+ _current_indent_level -= 1
+ yield '\n' + (' ' * (_indent * _current_indent_level))
+ yield '}'
+ if markers is not None:
+ del markers[markerid]
+
+ def _iterencode(o, _current_indent_level):
+ if isinstance(o, basestring):
+ yield _encoder(o)
+ elif o is None:
+ yield 'null'
+ elif o is True:
+ yield 'true'
+ elif o is False:
+ yield 'false'
+ elif isinstance(o, (int, long)):
+ yield str(o)
+ elif isinstance(o, float):
+ yield _floatstr(o)
+ elif isinstance(o, (list, tuple)):
+ for chunk in _iterencode_list(o, _current_indent_level):
+ yield chunk
+ elif isinstance(o, dict):
+ for chunk in _iterencode_dict(o, _current_indent_level):
+ yield chunk
+ else:
+ if markers is not None:
+ markerid = id(o)
+ if markerid in markers:
+ raise ValueError("Circular reference detected")
+ markers[markerid] = o
+ o = _default(o)
+ for chunk in _iterencode(o, _current_indent_level):
+ yield chunk
+ if markers is not None:
+ del markers[markerid]
+
+ return _iterencode
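
A brief, hedged sketch of the constructor options documented above (Python 2, same import assumption as the decoder example; the data is illustrative):

    from simplejson.encoder import JSONEncoder

    # compact separators plus sorted keys, as the docstring suggests
    enc = JSONEncoder(sort_keys=True, separators=(',', ':'))
    enc.encode({'b': [1, 2.5, None], 'a': True})      # '{"a":true,"b":[1,2.5,null]}'

    # iterencode() streams chunks instead of building one string
    for chunk in JSONEncoder(indent=2).iterencode({'pkg': 'zlib'}):
        pass  # e.g. write each chunk to a pipe
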
diff --git a/lib/chef/provider/package/yum/simplejson/encoder.pyc b/lib/chef/provider/package/yum/simplejson/encoder.pyc
new file mode 100644
index 0000000000..207bce5cfb
--- /dev/null
+++ b/lib/chef/provider/package/yum/simplejson/encoder.pyc
Binary files differ
diff --git a/lib/chef/provider/package/yum/simplejson/scanner.py b/lib/chef/provider/package/yum/simplejson/scanner.py
new file mode 100644
index 0000000000..adbc6ec979
--- /dev/null
+++ b/lib/chef/provider/package/yum/simplejson/scanner.py
@@ -0,0 +1,65 @@
+"""JSON token scanner
+"""
+import re
+try:
+ from simplejson._speedups import make_scanner as c_make_scanner
+except ImportError:
+ c_make_scanner = None
+
+__all__ = ['make_scanner']
+
+NUMBER_RE = re.compile(
+ r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
+ (re.VERBOSE | re.MULTILINE | re.DOTALL))
+
+def py_make_scanner(context):
+ parse_object = context.parse_object
+ parse_array = context.parse_array
+ parse_string = context.parse_string
+ match_number = NUMBER_RE.match
+ encoding = context.encoding
+ strict = context.strict
+ parse_float = context.parse_float
+ parse_int = context.parse_int
+ parse_constant = context.parse_constant
+ object_hook = context.object_hook
+
+ def _scan_once(string, idx):
+ try:
+ nextchar = string[idx]
+ except IndexError:
+ raise StopIteration
+
+ if nextchar == '"':
+ return parse_string(string, idx + 1, encoding, strict)
+ elif nextchar == '{':
+ return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
+ elif nextchar == '[':
+ return parse_array((string, idx + 1), _scan_once)
+ elif nextchar == 'n' and string[idx:idx + 4] == 'null':
+ return None, idx + 4
+ elif nextchar == 't' and string[idx:idx + 4] == 'true':
+ return True, idx + 4
+ elif nextchar == 'f' and string[idx:idx + 5] == 'false':
+ return False, idx + 5
+
+ m = match_number(string, idx)
+ if m is not None:
+ integer, frac, exp = m.groups()
+ if frac or exp:
+ res = parse_float(integer + (frac or '') + (exp or ''))
+ else:
+ res = parse_int(integer)
+ return res, m.end()
+ elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
+ return parse_constant('NaN'), idx + 3
+ elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
+ return parse_constant('Infinity'), idx + 8
+ elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
+ return parse_constant('-Infinity'), idx + 9
+ else:
+ raise StopIteration
+
+ return _scan_once
+
+make_scanner = c_make_scanner or py_make_scanner
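
For reference, ``make_scanner`` above is what ``JSONDecoder`` installs as its ``scan_once`` hook; a small sketch (Python 2, illustrative input):

    from simplejson.decoder import JSONDecoder

    scan = JSONDecoder().scan_once       # built via make_scanner(decoder)
    scan('[1, 2.5, "x"]', 0)             # ([1, 2.5, u'x'], 13)
    scan('bogus', 0)                     # raises StopIteration (raw_decode turns this into ValueError)
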
diff --git a/lib/chef/provider/package/yum/simplejson/scanner.pyc b/lib/chef/provider/package/yum/simplejson/scanner.pyc
new file mode 100644
index 0000000000..12df070e44
--- /dev/null
+++ b/lib/chef/provider/package/yum/simplejson/scanner.pyc
Binary files differ
diff --git a/lib/chef/provider/package/yum/simplejson/tool.py b/lib/chef/provider/package/yum/simplejson/tool.py
new file mode 100644
index 0000000000..90443317b2
--- /dev/null
+++ b/lib/chef/provider/package/yum/simplejson/tool.py
@@ -0,0 +1,37 @@
+r"""Command-line tool to validate and pretty-print JSON
+
+Usage::
+
+ $ echo '{"json":"obj"}' | python -m simplejson.tool
+ {
+ "json": "obj"
+ }
+ $ echo '{ 1.2:3.4}' | python -m simplejson.tool
+ Expecting property name: line 1 column 2 (char 2)
+
+"""
+import sys
+import simplejson
+
+def main():
+ if len(sys.argv) == 1:
+ infile = sys.stdin
+ outfile = sys.stdout
+ elif len(sys.argv) == 2:
+ infile = open(sys.argv[1], 'rb')
+ outfile = sys.stdout
+ elif len(sys.argv) == 3:
+ infile = open(sys.argv[1], 'rb')
+ outfile = open(sys.argv[2], 'wb')
+ else:
+ raise SystemExit(sys.argv[0] + " [infile [outfile]]")
+ try:
+ obj = simplejson.load(infile)
+ except ValueError, e:
+ raise SystemExit(e)
+ simplejson.dump(obj, outfile, sort_keys=True, indent=4)
+ outfile.write('\n')
+
+
+if __name__ == '__main__':
+ main()
diff --git a/lib/chef/provider/package/yum/version.rb b/lib/chef/provider/package/yum/version.rb
new file mode 100644
index 0000000000..4c586f9b53
--- /dev/null
+++ b/lib/chef/provider/package/yum/version.rb
@@ -0,0 +1,56 @@
+#
+# Copyright:: Copyright 2016, Chef Software, Inc.
+# License:: Apache License, Version 2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+class Chef
+ class Provider
+ class Package
+ class Yum < Chef::Provider::Package
+
+ # helper class to assist in passing around name/version/arch triples
+ class Version
+ attr_accessor :name
+ attr_accessor :version
+ attr_accessor :arch
+
+ def initialize(name, version, arch)
+ @name = name
+ @version = version
+ @arch = arch
+ end
+
+ def to_s
+ "#{name}-#{version}.#{arch}"
+ end
+
+ def version_with_arch
+ "#{version}.#{arch}" unless version.nil?
+ end
+
+ def matches_name_and_arch?(other)
+ other.version == version && other.arch == arch
+ end
+
+ def ==(other)
+ name == other.name && version == other.version && arch == other.arch
+ end
+
+ alias eql? ==
+ end
+ end
+ end
+ end
+end
diff --git a/lib/chef/provider/package/yum/yum-dump.py b/lib/chef/provider/package/yum/yum-dump.py
deleted file mode 100644
index 6183460195..0000000000
--- a/lib/chef/provider/package/yum/yum-dump.py
+++ /dev/null
@@ -1,307 +0,0 @@
-#
-# Author:: Matthew Kent (<mkent@magoazul.com>)
-# Copyright:: Copyright 2009-2016, Matthew Kent
-# License:: Apache License, Version 2.0
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# yum-dump.py
-# Inspired by yumhelper.py by David Lutterkort
-#
-# Produce a list of installed, available and re-installable packages using yum
-# and dump the results to stdout.
-#
-# yum-dump invokes yum similarly to the command line interface which makes it
-# subject to most of the configuration parameters in yum.conf. yum-dump will
-# also load yum plugins in the same manor as yum - these can affect the output.
-#
-# Can be run as non root, but that won't update the cache.
-#
-# Intended to support yum 2.x and 3.x
-
-import os
-import sys
-import time
-import yum
-import re
-import errno
-
-from yum import Errors
-from optparse import OptionParser
-from distutils import version
-
-YUM_PID_FILE='/var/run/yum.pid'
-
-YUM_VER = version.StrictVersion(yum.__version__)
-YUM_MAJOR = YUM_VER.version[0]
-
-if YUM_MAJOR > 3 or YUM_MAJOR < 2:
- print >> sys.stderr, "yum-dump Error: Can't match supported yum version" \
- " (%s)" % yum.__version__
- sys.exit(1)
-
-# Required for Provides output
-if YUM_MAJOR == 2:
- import rpm
- import rpmUtils.miscutils
-
-def setup(yb, options):
- # Only want our output
- #
- if YUM_MAJOR == 3:
- try:
- if YUM_VER >= version.StrictVersion("3.2.22"):
- yb.preconf.errorlevel=0
- yb.preconf.debuglevel=0
-
- # initialize the config
- yb.conf
- else:
- yb.doConfigSetup(errorlevel=0, debuglevel=0)
- except yum.Errors.ConfigError, e:
- # suppresses an ignored exception at exit
- yb.preconf = None
- print >> sys.stderr, "yum-dump Config Error: %s" % e
- return 1
- except ValueError, e:
- yb.preconf = None
- print >> sys.stderr, "yum-dump Options Error: %s" % e
- return 1
- elif YUM_MAJOR == 2:
- yb.doConfigSetup()
-
- def __log(a,b): pass
-
- yb.log = __log
- yb.errorlog = __log
-
- # Give Chef every possible package version, it can decide what to do with them
- if YUM_MAJOR == 3:
- yb.conf.showdupesfromrepos = True
- elif YUM_MAJOR == 2:
- yb.conf.setConfigOption('showdupesfromrepos', True)
-
- # Optionally run only on cached repositories, but non root must use the cache
- if os.geteuid() != 0:
- if YUM_MAJOR == 3:
- yb.conf.cache = True
- elif YUM_MAJOR == 2:
- yb.conf.setConfigOption('cache', True)
- else:
- if YUM_MAJOR == 3:
- yb.conf.cache = options.cache
- elif YUM_MAJOR == 2:
- yb.conf.setConfigOption('cache', options.cache)
-
- # Handle repo toggle via id or glob exactly like yum
- for opt, repos in options.repo_control:
- for repo in repos:
- if opt == '--enablerepo':
- yb.repos.enableRepo(repo)
- elif opt == '--disablerepo':
- yb.repos.disableRepo(repo)
-
- return 0
-
-def dump_packages(yb, list, output_provides):
- packages = {}
-
- if YUM_MAJOR == 2:
- yb.doTsSetup()
- yb.doRepoSetup()
- yb.doSackSetup()
-
- db = yb.doPackageLists(list)
-
- for pkg in db.installed:
- pkg.type = 'i'
- packages[str(pkg)] = pkg
-
- if YUM_VER >= version.StrictVersion("3.2.21"):
- for pkg in db.available:
- pkg.type = 'a'
- packages[str(pkg)] = pkg
-
- # These are both installed and available
- for pkg in db.reinstall_available:
- pkg.type = 'r'
- packages[str(pkg)] = pkg
- else:
- # Old style method - no reinstall list
- for pkg in yb.pkgSack.returnPackages():
-
- if str(pkg) in packages:
- if packages[str(pkg)].type == "i":
- packages[str(pkg)].type = 'r'
- continue
-
- pkg.type = 'a'
- packages[str(pkg)] = pkg
-
- unique_packages = packages.values()
-
- unique_packages.sort(lambda x, y: cmp(x.name, y.name))
-
- for pkg in unique_packages:
- if output_provides == "all" or \
- (output_provides == "installed" and (pkg.type == "i" or pkg.type == "r")):
-
- # yum 2 doesn't have provides_print, implement it ourselves using methods
- # based on requires gathering in packages.py
- if YUM_MAJOR == 2:
- provlist = []
-
- # Installed and available are gathered in different ways
- if pkg.type == 'i' or pkg.type == 'r':
- names = pkg.hdr[rpm.RPMTAG_PROVIDENAME]
- flags = pkg.hdr[rpm.RPMTAG_PROVIDEFLAGS]
- ver = pkg.hdr[rpm.RPMTAG_PROVIDEVERSION]
- if names is not None:
- tmplst = zip(names, flags, ver)
-
- for (n, f, v) in tmplst:
- prov = rpmUtils.miscutils.formatRequire(n, v, f)
- provlist.append(prov)
- # This is slow :(
- elif pkg.type == 'a':
- for prcoTuple in pkg.returnPrco('provides'):
- prcostr = pkg.prcoPrintable(prcoTuple)
- provlist.append(prcostr)
-
- provides = provlist
- else:
- provides = pkg.provides_print
- else:
- provides = "[]"
-
- print '%s %s %s %s %s %s %s %s' % (
- pkg.name,
- pkg.epoch,
- pkg.version,
- pkg.release,
- pkg.arch,
- provides,
- pkg.type,
- pkg.repoid )
-
- return 0
-
-def yum_dump(options):
- lock_obtained = False
-
- yb = yum.YumBase()
-
- status = setup(yb, options)
- if status != 0:
- return status
-
- if options.output_options:
- print "[option installonlypkgs] %s" % " ".join(yb.conf.installonlypkgs)
-
- # Non root can't handle locking on rhel/centos 4
- if os.geteuid() != 0:
- return dump_packages(yb, options.package_list, options.output_provides)
-
- # Wrap the collection and output of packages in yum's global lock to prevent
- # any inconsistencies.
- try:
- # Spin up to --yum-lock-timeout option
- countdown = options.yum_lock_timeout
- while True:
- try:
- yb.doLock(YUM_PID_FILE)
- lock_obtained = True
- except Errors.LockError, e:
- time.sleep(1)
- countdown -= 1
- if countdown == 0:
- print >> sys.stderr, "yum-dump Locking Error! Couldn't obtain an " \
- "exclusive yum lock in %d seconds. Giving up." % options.yum_lock_timeout
- return 200
- else:
- break
-
- return dump_packages(yb, options.package_list, options.output_provides)
-
- # Ensure we clear the lock and cleanup any resources
- finally:
- try:
- yb.closeRpmDB()
- if lock_obtained == True:
- yb.doUnlock(YUM_PID_FILE)
- except Errors.LockError, e:
- print >> sys.stderr, "yum-dump Unlock Error: %s" % e
- return 200
-
-# Preserve order of enable/disable repo args like yum does
-def gather_repo_opts(option, opt, value, parser):
- if getattr(parser.values, option.dest, None) is None:
- setattr(parser.values, option.dest, [])
- getattr(parser.values, option.dest).append((opt, value.split(',')))
-
-def main():
- usage = "Usage: %prog [options]\n" + \
- "Output a list of installed, available and re-installable packages via yum"
- parser = OptionParser(usage=usage)
- parser.add_option("-C", "--cache",
- action="store_true", dest="cache", default=False,
- help="run entirely from cache, don't update cache")
- parser.add_option("-o", "--options",
- action="store_true", dest="output_options", default=False,
- help="output select yum options useful to Chef")
- parser.add_option("-p", "--installed-provides",
- action="store_const", const="installed", dest="output_provides", default="none",
- help="output Provides for installed packages, big/wide output")
- parser.add_option("-P", "--all-provides",
- action="store_const", const="all", dest="output_provides", default="none",
- help="output Provides for all package, slow, big/wide output")
- parser.add_option("-i", "--installed",
- action="store_const", const="installed", dest="package_list", default="all",
- help="output only installed packages")
- parser.add_option("-a", "--available",
- action="store_const", const="available", dest="package_list", default="all",
- help="output only available and re-installable packages")
- parser.add_option("--enablerepo",
- action="callback", callback=gather_repo_opts, type="string", dest="repo_control", default=[],
- help="enable disabled repositories by id or glob")
- parser.add_option("--disablerepo",
- action="callback", callback=gather_repo_opts, type="string", dest="repo_control", default=[],
- help="disable repositories by id or glob")
- parser.add_option("--yum-lock-timeout",
- action="store", type="int", dest="yum_lock_timeout", default=30,
- help="Time in seconds to wait for yum process lock")
-
- (options, args) = parser.parse_args()
-
- try:
- return yum_dump(options)
-
- except yum.Errors.RepoError, e:
- print >> sys.stderr, "yum-dump Repository Error: %s" % e
- return 1
-
- except yum.Errors.YumBaseError, e:
- print >> sys.stderr, "yum-dump General Error: %s" % e
- return 1
-
-try:
- status = main()
-# Suppress a nasty broken pipe error when output is piped to utilities like 'head'
-except IOError, e:
- if e.errno == errno.EPIPE:
- sys.exit(1)
- else:
- raise
-
-sys.exit(status)
diff --git a/lib/chef/provider/package/yum/yum_cache.rb b/lib/chef/provider/package/yum/yum_cache.rb
index 9fd95af138..85337e8e4b 100644
--- a/lib/chef/provider/package/yum/yum_cache.rb
+++ b/lib/chef/provider/package/yum/yum_cache.rb
@@ -1,6 +1,6 @@
# Author:: Adam Jacob (<adam@chef.io>)
-# Copyright:: Copyright 2008-2016, Chef Software, Inc.
+# Copyright:: Copyright 2008-2017, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,12 +16,9 @@
# limitations under the License.
#
-require "chef/config"
+require "chef/provider/package/yum/python_helper"
require "chef/provider/package"
-require "chef/mixin/which"
-require "chef/mixin/shell_out"
require "singleton"
-require "chef/provider/package/yum/rpm_utils"
class Chef
class Provider
@@ -29,344 +26,46 @@ class Chef
class Yum < Chef::Provider::Package
# Cache for our installed and available packages, pulled in from yum-dump.py
class YumCache
- include Chef::Mixin::Which
- include Chef::Mixin::ShellOut
include Singleton
- attr_accessor :yum_binary
-
- def initialize
- @rpmdb = RPMDb.new
-
- # Next time @rpmdb is accessed:
- # :all - Trigger a run of "yum-dump.py --options --installed-provides", updates
- # yum's cache and parses options from /etc/yum.conf. Pulls in Provides
- # dependency data for installed packages only - this data is slow to
- # gather.
- # :provides - Same as :all but pulls in Provides data for available packages as well.
- # Used as a last resort when we can't find a Provides match.
- # :installed - Trigger a run of "yum-dump.py --installed", only reads the local rpm
- # db. Used between client runs for a quick refresh.
- # :none - Do nothing, a call to one of the reload methods is required.
- @next_refresh = :all
-
- @allow_multi_install = []
-
- @extra_repo_control = nil
-
- # these are for subsequent runs if we are on an interval
- Chef::Client.when_run_starts do
- YumCache.instance.reload
- end
- end
-
- attr_reader :extra_repo_control
-
- # Cache management
- #
-
- def yum_dump_path
- ::File.join(::File.dirname(__FILE__), "yum-dump.py")
- end
-
def refresh
- case @next_refresh
- when :none
- return nil
- when :installed
- reset_installed
- # fast
- opts = " --installed"
- when :all
- reset
- # medium
- opts = " --options --installed-provides"
- when :provides
- reset
- # slow!
- opts = " --options --all-provides"
- else
- raise ArgumentError, "Unexpected value in next_refresh: #{@next_refresh}"
- end
-
- if @extra_repo_control
- opts << " #{@extra_repo_control}"
- end
-
- opts << " --yum-lock-timeout #{Chef::Config[:yum_lock_timeout]}"
-
- one_line = false
- error = nil
-
- status = nil
-
- begin
- status = shell_out!("#{python_bin} #{yum_dump_path}#{opts}", timeout: Chef::Config[:yum_timeout])
- status.stdout.each_line do |line|
- one_line = true
-
- line.chomp!
- if line =~ /\[option (.*)\] (.*)/
- if $1 == "installonlypkgs"
- @allow_multi_install = $2.split
- else
- raise Chef::Exceptions::Package, "Strange, unknown option line '#{line}' from yum-dump.py"
- end
- next
- end
-
- if line =~ /^(\S+) ([0-9]+) (\S+) (\S+) (\S+) \[(.*)\] ([i,a,r]) (\S+)$/
- name = $1
- epoch = $2
- version = $3
- release = $4
- arch = $5
- provides = parse_provides($6)
- type = $7
- repoid = $8
- else
- Chef::Log.warn("Problem parsing line '#{line}' from yum-dump.py! " \
- "Please check your yum configuration.")
- next
- end
-
- case type
- when "i"
- # if yum-dump was called with --installed this may not be true, but it's okay
- # since we don't touch the @available Set in reload_installed
- available = false
- installed = true
- when "a"
- available = true
- installed = false
- when "r"
- available = true
- installed = true
- end
-
- pkg = RPMDbPackage.new(name, epoch, version, release, arch, provides, installed, available, repoid)
- @rpmdb << pkg
- end
-
- error = status.stderr
- rescue Mixlib::ShellOut::CommandTimeout => e
- Chef::Log.error("#{yum_dump_path} exceeded timeout #{Chef::Config[:yum_timeout]}")
- raise(e)
- end
-
- if status.exitstatus != 0
- raise Chef::Exceptions::Package, "Yum failed - #{status.inspect} - returns: #{error}"
- else
- unless one_line
- Chef::Log.warn("Odd, no output from yum-dump.py. Please check " \
- "your yum configuration.")
- end
- end
-
- # A reload method must be called before the cache is altered
- @next_refresh = :none
- end
-
- def python_bin
- yum_executable = which(yum_binary)
- if yum_executable && shabang?(yum_executable)
- shabang_or_fallback(extract_interpreter(yum_executable))
- else
- Chef::Log.warn("Yum executable not found or doesn't start with #!. Using default python.")
- "/usr/bin/python"
- end
- rescue StandardError => e
- Chef::Log.warn("An error occurred attempting to determine correct python executable. Using default.")
- Chef::Log.debug(e)
- "/usr/bin/python"
- end
-
- def extract_interpreter(file)
- ::File.open(file, "r", &:readline)[2..-1].strip
- end
-
- # dnf based systems have a yum shim that has /bin/bash as the interpreter. Don't use this.
- def shabang_or_fallback(interpreter)
- if interpreter == "/bin/bash"
- Chef::Log.warn("Yum executable interpreter is /bin/bash. Falling back to default python.")
- "/usr/bin/python"
- else
- interpreter
- end
- end
-
- def shabang?(file)
- ::File.open(file, "r") do |f|
- f.read(2) == "#!"
- end
- rescue Errno::ENOENT
- false
+ python_helper.restart
end
def reload
- @next_refresh = :all
+ python_helper.restart
end
def reload_installed
- @next_refresh = :installed
+ python_helper.restart
end
def reload_provides
- @next_refresh = :provides
+ python_helper.restart
end
def reset
- @rpmdb.clear
+ python_helper.restart
end
def reset_installed
- @rpmdb.clear_installed
- end
-
- # Querying the cache
- #
-
- # Check for package by name or name+arch
- def package_available?(package_name)
- refresh
-
- if @rpmdb.lookup(package_name)
- return true
- else
- if package_name =~ /^(.*)\.(.*)$/
- pkg_name = $1
- pkg_arch = $2
-
- if matches = @rpmdb.lookup(pkg_name)
- matches.each do |m|
- return true if m.arch == pkg_arch
- end
- end
- end
- end
-
- false
- end
-
- # Returns a array of packages satisfying an RPMDependency
- def packages_from_require(rpmdep)
- refresh
- @rpmdb.whatprovides(rpmdep)
- end
-
- # Check if a package-version.arch is available to install
- def version_available?(package_name, desired_version, arch = nil)
- version(package_name, arch, true, false) do |v|
- return true if desired_version == v
- end
-
- false
- end
-
- # Return the source repository for a package-version.arch
- def package_repository(package_name, desired_version, arch = nil)
- package(package_name, arch, true, false) do |pkg|
- return pkg.repoid if desired_version == pkg.version.to_s
- end
-
- nil
+ python_helper.restart
end
- # Return the latest available version for a package.arch
- def available_version(package_name, arch = nil)
- version(package_name, arch, true, false)
+ def available_version(name)
+ p = python_helper.package_query(:whatavailable, name)
+ "#{p.version}.#{p.arch}"
end
- alias candidate_version available_version
- # Return the currently installed version for a package.arch
- def installed_version(package_name, arch = nil)
- version(package_name, arch, false, true)
- end
-
- # Return an array of packages allowed to be installed multiple times, such as the kernel
- def allow_multi_install
- refresh
- @allow_multi_install
- end
-
- def enable_extra_repo_control(arg)
- # Don't touch cache if it's the same repos as the last load
- unless @extra_repo_control == arg
- @extra_repo_control = arg
- reload
- end
- end
-
- def disable_extra_repo_control
- # Only force reload when set
- if @extra_repo_control
- @extra_repo_control = nil
- reload
- end
+ def installed_version(name)
+ p = python_helper.package_query(:whatinstalled, name)
+ "#{p.version}.#{p.arch}"
end
private
- def version(package_name, arch = nil, is_available = false, is_installed = false)
- package(package_name, arch, is_available, is_installed) do |pkg|
- if block_given?
- yield pkg.version.to_s
- else
- # first match is latest version
- return pkg.version.to_s
- end
- end
-
- if block_given?
- return self
- else
- return nil
- end
- end
-
- def package(package_name, arch = nil, is_available = false, is_installed = false)
- refresh
- packages = @rpmdb[package_name]
- if packages
- packages.each do |pkg|
- if is_available
- next unless @rpmdb.available?(pkg)
- end
- if is_installed
- next unless @rpmdb.installed?(pkg)
- end
- if arch
- next unless pkg.arch == arch
- end
-
- if block_given?
- yield pkg
- else
- # first match is latest version
- return pkg
- end
- end
- end
-
- if block_given?
- return self
- else
- return nil
- end
- end
-
- # Parse provides from yum-dump.py output
- def parse_provides(string)
- ret = []
- # ['atk = 1.12.2-1.fc6', 'libatk-1.0.so.0']
- string.split(", ").each do |seg|
- # 'atk = 1.12.2-1.fc6'
- if seg =~ /^'(.*)'$/
- ret << RPMProvide.parse($1)
- end
- end
-
- ret
+ def python_helper
+ @python_helper ||= PythonHelper.instance
end
end # YumCache
diff --git a/lib/chef/provider/package/yum/yum_helper.py b/lib/chef/provider/package/yum/yum_helper.py
new file mode 100644
index 0000000000..72e1177e8e
--- /dev/null
+++ b/lib/chef/provider/package/yum/yum_helper.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python3
+# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
+
+#
+# NOTE: this actually needs to run under python2.4 and centos 5.x through python3 and centos 7.x
+# please manually test changes on centos5 boxes or you will almost certainly break things.
+#
+
+import sys
+import yum
+import signal
+import os
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'simplejson'))
+try: import json
+except ImportError: import simplejson as json
+import re
+from rpmUtils.miscutils import stringToVersion,compareEVR
+from rpmUtils.arch import getBaseArch, getArchList
+
+
+try: from yum.misc import string_to_prco_tuple
+except ImportError:
+ # RHEL5 compat
+ def string_to_prco_tuple(prcoString):
+ prco_split = prcoString.split()
+ n, f, v = prco_split
+ (prco_e, prco_v, prco_r) = stringToVersion(v)
+ return (n, f, (prco_e, prco_v, prco_r))
+
+base = None
+
+def get_base():
+ global base
+ if base is None:
+ base = yum.YumBase()
+ setup_exit_handler()
+ return base
+
+def versioncompare(versions):
+ arch_list = getArchList()
+ candidate_arch1 = versions[0].split(".")[-1]
+ candidate_arch2 = versions[1].split(".")[-1]
+
+ # The first version number passed to this method is always a valid nevra (the current version)
+ # If the second version number looks like it does not contain a valid arch
+ # then we'll chop the arch component (assuming it *is* a valid one) from the first version string
+ # so we're only comparing the evr portions.
+ if (candidate_arch2 not in arch_list) and (candidate_arch1 in arch_list):
+ final_version1 = versions[0].replace("." + candidate_arch1,"")
+ else:
+ final_version1 = versions[0]
+
+ final_version2 = versions[1]
+
+ (e1, v1, r1) = stringToVersion(final_version1)
+ (e2, v2, r2) = stringToVersion(final_version2)
+
+ evr_comparison = compareEVR((e1, v1, r1), (e2, v2, r2))
+ outpipe.write("%(e)s\n" % { 'e': evr_comparison })
+ outpipe.flush()
+
+def install_only_packages(name):
+ base = get_base()
+ if name in base.conf.installonlypkgs:
+ outpipe.write('True')
+ else:
+ outpipe.write('False')
+ outpipe.flush()
+
+# python2.4 / centos5 compat
+try:
+ any
+except NameError:
+ def any(s):
+ for v in s:
+ if v:
+ return True
+ return False
+
+def query(command):
+ base = get_base()
+
+ enabled_repos = base.repos.listEnabled()
+
+ # Handle any repocontrols passed in with our options
+ if 'enablerepos' in command:
+ base.repos.enableRepo(*command['enablerepos'])
+
+ if 'disablerepos' in command:
+ base.repos.disableRepo(*command['disablerepos'])
+
+ args = { 'name': command['provides'] }
+ do_nevra = False
+ if 'epoch' in command:
+ args['epoch'] = command['epoch']
+ do_nevra = True
+ if 'version' in command:
+ args['ver'] = command['version']
+ do_nevra = True
+ if 'release' in command:
+ args['rel'] = command['release']
+ do_nevra = True
+ if 'arch' in command:
+ desired_arch = command['arch']
+ args['arch'] = command['arch']
+ do_nevra = True
+ else:
+ desired_arch = getBaseArch()
+
+ obj = None
+ if command['action'] == "whatinstalled":
+ obj = base.rpmdb
+ else:
+ obj = base.pkgSack
+
+ # if we are given "name == 1.2.3" then we must use the getProvides() API.
+ # - this means that we ignore arch and version properties when given prco tuples as a package_name
+ # - in order to fix this, something would have to happen where getProvides was called first and
+ # then the result was searchNevra'd. please be extremely careful if attempting to fix that
+ # since searchNevra does not support prco tuples.
+ if any(elem in command['provides'] for elem in r"<=>"):
+        # handles flags (<, >, =, etc) and versions, but no wildcards
+ pkgs = obj.getProvides(*string_to_prco_tuple(command['provides']))
+ elif do_nevra:
+ # now if we're given version or arch properties explicitly, then we do a SearchNevra.
+ # - this means that wildcard version in the package_name with an arch property will not work correctly
+ # - again don't try to fix this just by pushing bugs around in the code, you would need to call
+ # returnPackages and searchProvides and then apply the Nevra filters to those results.
+ pkgs = obj.searchNevra(**args)
+ if (command['action'] == "whatinstalled") and (not pkgs):
+ pkgs = obj.searchNevra(name=args['name'], arch=desired_arch)
+ else:
+ pats = [command['provides']]
+ pkgs = obj.returnPackages(patterns=pats)
+
+ if not pkgs:
+ # handles wildcards
+ pkgs = obj.searchProvides(command['provides'])
+
+ if not pkgs:
+ outpipe.write(command['provides'].split().pop(0)+' nil nil\n')
+ outpipe.flush()
+ else:
+ # make sure we picked the package with the highest version
+ pkgs = base.bestPackagesFromList(pkgs,single_name=True)
+ pkg = pkgs.pop(0)
+ outpipe.write("%(n)s %(e)s:%(v)s-%(r)s %(a)s\n" % { 'n': pkg.name, 'e': pkg.epoch, 'v': pkg.version, 'r': pkg.release, 'a': pkg.arch })
+ outpipe.flush()
+
+ # Reset any repos we were passed in enablerepo/disablerepo to the original state in enabled_repos
+ if 'enablerepos' in command:
+ for repo in command['enablerepos']:
+ if base.repos.getRepo(repo) not in enabled_repos:
+ base.repos.disableRepo(repo)
+
+ if 'disablerepos' in command:
+ for repo in command['disablerepos']:
+ if base.repos.getRepo(repo) in enabled_repos:
+ base.repos.enableRepo(repo)
+
+# the design of this helper is that it should try to be 'brittle' and fail hard and exit in order
+# to keep process tables clean. additional error handling should probably be added to the retry loop
+# on the ruby side.
+def exit_handler(signal, frame):
+ base.closeRpmDB()
+ sys.exit(0)
+
+def setup_exit_handler():
+ signal.signal(signal.SIGINT, exit_handler)
+ signal.signal(signal.SIGHUP, exit_handler)
+ signal.signal(signal.SIGPIPE, exit_handler)
+ signal.signal(signal.SIGQUIT, exit_handler)
+
+if len(sys.argv) < 3:
+ inpipe = sys.stdin
+ outpipe = sys.stdout
+else:
+ inpipe = os.fdopen(int(sys.argv[1]), "r")
+ outpipe = os.fdopen(int(sys.argv[2]), "w")
+
+while 1:
+ # kill self if we get orphaned (tragic)
+ ppid = os.getppid()
+ if ppid == 1:
+ sys.exit(0)
+ setup_exit_handler()
+ line = inpipe.readline()
+ command = json.loads(line)
+ if command['action'] == "whatinstalled":
+ query(command)
+ elif command['action'] == "whatavailable":
+ query(command)
+ elif command['action'] == "versioncompare":
+ versioncompare(command['versions'])
+ elif command['action'] == "installonlypkgs":
+ install_only_packages(command['package'])
+ else:
+ raise RuntimeError("bad command")
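
To make the line protocol above concrete, a hedged sketch of driving the helper over stdin/stdout (the fallback taken when no file-descriptor arguments are passed); the package name, versions, and path are illustrative, and in Chef the Ruby-side PythonHelper owns this conversation:

    import json
    import subprocess

    # one JSON object per line in; one whitespace-separated reply per line out
    p = subprocess.Popen(["python", "yum_helper.py"],
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         universal_newlines=True)

    p.stdin.write(json.dumps({"action": "whatavailable", "provides": "zlib"}) + "\n")
    p.stdin.flush()
    print(p.stdout.readline().strip())   # e.g. "zlib 0:1.2.7-18.el7 x86_64", or "zlib nil nil"

    p.stdin.write(json.dumps({"action": "versioncompare",
                              "versions": ["1:1.2.7-18.el7", "1:1.2.7-17.el7"]}) + "\n")
    p.stdin.flush()
    print(p.stdout.readline().strip())   # "1" when the first version sorts higher
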