18588974 solver can fail with faulty rejections for incorporates
author:    saurabh.vyas@oracle.com
date:      Wed, 19 Nov 2014 09:58:50 +0530
branch:    s11-update
tag:       s11u3b11
changeset: 3134:c1da85e7efe7
parent:    3132:8168921f1872
child:     3135:fc71d5045295
18588974 solver can fail with faulty rejections for incorporates
15618618 solver plan errors could explain incorporation-related failures better
15709997 missing stderr line-break on some pkg uninstall errors
15739070 need diagnostics if --rejected pkgs are required by installed
15750887 solver error output could be reduced / clarified
15795063 solver error messages need improvement for conditional deps
17384240 pkg solver emits no useful error when exclude deps reject proposed packages
18698282 solver falsely lists packages as excluded by installed incorp
18698613 solver emits no useful errors for optional deps that reject proposed packages
15704264 variant mismatch message is cryptic
19816038 pkg planning phase performance needs improvement
src/client.py
src/modules/client/api_errors.py
src/modules/client/pkg_solver.py
src/modules/fmri.py
src/setup.py
src/tests/api/t_linked_image.py
src/tests/cli/t_pkg_image_update.py
src/tests/cli/t_pkg_install.py
src/tests/cli/t_pkg_version.py
src/tests/pkg5unittest.py
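
The diffs below center on one idea: every FMRI the solver trims from consideration is now tagged with a reason-type constant (the _TRIM_* values added in pkg_solver.py) alongside its human-readable reason, so error reporting can group and de-duplicate rejections instead of emitting one raw line per trimmed version. The following is a minimal, hypothetical sketch of that pattern; TrimRegistry and its methods are illustrative names, not part of the pkg codebase.

    from collections import defaultdict

    # Illustrative reason-type constants, in the spirit of the _TRIM_* values.
    TRIM_REJECT = 19
    TRIM_FREEZE = 4

    class TrimRegistry(object):
        def __init__(self):
            # fmri -> set of (reason_id, reason-text) tuples
            self.trim_dict = defaultdict(set)

        def trim(self, fmris, reason_id, reason):
            for f in fmris:
                self.trim_dict[f].add((reason_id, reason))

        def errors(self, fmris):
            # Emit one line per distinct reason rather than one per FMRI.
            seen = set()
            out = []
            for f in fmris:
                for reason_id, reason in sorted(self.trim_dict.get(f, ())):
                    if (reason_id, reason) not in seen:
                        seen.add((reason_id, reason))
                        out.append("  {0}".format(reason))
            return out
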
--- a/src/client.py	Tue Nov 18 10:56:28 2014 +0900
+++ b/src/client.py	Wed Nov 19 09:58:50 2014 +0530
@@ -1661,11 +1661,7 @@
                     ) % locals())
                 return EXIT_OOPS
         if e_type == api_errors.NonLeafPackageException:
-                error(_("""\
-Cannot remove '%s' due to the following packages that depend on it:"""
-                    ) % e.fmri, cmd=op)
-                for d in e.dependents:
-                        logger.error("  %s" % d)
+                error("\n" + str(e), cmd=op)
                 return EXIT_OOPS
         if e_type == api_errors.CatalogRefreshException:
                 display_catalog_failures(e)
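
In client.py the NonLeafPackageException branch now just prints the exception's own string form instead of re-formatting the dependents list in the CLI. A minimal sketch of that centralize-formatting-in-__str__ pattern (class and package names here are illustrative, not from the pkg codebase):

    class RemovalBlocked(Exception):
        def __init__(self, fmri, dependents):
            Exception.__init__(self)
            self.fmri = fmri
            self.dependents = dependents

        def __str__(self):
            lines = ["Unable to remove '{0}' due to the following packages "
                     "that depend on it:".format(self.fmri)]
            lines.extend("  {0}".format(d) for d in sorted(self.dependents))
            return "\n".join(lines)

    # CLI layer: a single print of str(e) replaces per-field formatting.
    try:
        raise RemovalBlocked("library/zlib", ["web/server", "database/db"])
    except RemovalBlocked as e:
        print("\n" + str(e))
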
--- a/src/modules/client/api_errors.py	Tue Nov 18 10:56:28 2014 +0900
+++ b/src/modules/client/api_errors.py	Wed Nov 19 09:58:50 2014 +0530
@@ -25,6 +25,7 @@
 #
 
 import errno
+import operator
 import os
 import xml.parsers.expat as expat
 import urlparse
@@ -1353,9 +1354,15 @@
                 self.dependents = args[1]
 
         def __str__(self):
-                s = _("Unable to remove '%s' due to the following packages "
-                    "that depend on it:\n") % self.fmri
-                s += "\n".join(str(f) for f in self.dependents)
+                s = _("Unable to remove '{0}' due to the following packages "
+                    "that depend on it:\n").format(self.fmri.get_short_fmri(
+                        anarchy=True, include_scheme=False))
+                skey = operator.attrgetter('pkg_name')
+                s += "\n".join(
+                    "  {0}".format(f.get_short_fmri(anarchy=True,
+                        include_scheme=False))
+                    for f in sorted(self.dependents, key=skey)
+                )
                 return s
 
 def _str_autofix(self):
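
The reworked NonLeafPackageException.__str__ above prints its dependents sorted by package stem, formatted as short FMRIs. A small sketch of the same sorting and formatting pattern, using a hypothetical MiniFmri stand-in rather than the real pkg.fmri.PkgFmri class:

    import operator

    class MiniFmri(object):
        # Stand-in with only the attributes the pattern needs.
        def __init__(self, pkg_name, version):
            self.pkg_name = pkg_name
            self.version = version

        def get_short_fmri(self):
            return "{0}@{1}".format(self.pkg_name, self.version)

    dependents = [MiniFmri("web/server", "2.4"), MiniFmri("database/db", "5.5")]
    skey = operator.attrgetter("pkg_name")
    print("\n".join(
        "  {0}".format(f.get_short_fmri())
        for f in sorted(dependents, key=skey)))
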
--- a/src/modules/client/pkg_solver.py	Tue Nov 18 10:56:28 2014 +0900
+++ b/src/modules/client/pkg_solver.py	Wed Nov 19 09:58:50 2014 +0530
@@ -23,7 +23,11 @@
 #
 # Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
 #
-import os
+
+"""Provides the interfaces and exceptions needed to determine which packages
+should be installed, updated, or removed to perform a requested operation."""
+
+import operator
 import time
 
 import pkg.actions
@@ -47,23 +51,67 @@
 SOLVER_FAIL    = "Failed"
 SOLVER_SUCCESS = "Succeeded"
 
+#
+# Constants for the reasons why packages were trimmed from the possible set.
+# The reasons listed below do *not* always map 1:1 to the error text produced;
+# instead, they indicate the 'type' of trim applied. Values below must be
+# unique, but can be changed at any time.
+#
+_TRIM_DEP_MISSING = 0              # no matching pkg version found for dep
+_TRIM_DEP_OBSOLETE = 1             # all versions allowed by dep are obsolete
+_TRIM_DEP_TRIMMED = 2              # all versions allowed by dep already trimmed
+_TRIM_FIRMWARE = 3                 # firmware version requirement
+_TRIM_FREEZE = 4                   # pkg not allowed by freeze
+_TRIM_INSTALLED_EXCLUDE = 5        # pkg excludes installed pkg
+_TRIM_INSTALLED_INC = 6            # not allowed by installed pkg incorporation
+_TRIM_INSTALLED_NEWER = 7          # newer version installed already
+_TRIM_INSTALLED_ORIGIN = 8         # installed version in image too old
+_TRIM_INSTALLED_ROOT_ORIGIN = 9    # installed version in root image too old
+_TRIM_PARENT_MISSING = 10          # parent image must have this pkg too
+_TRIM_PARENT_NEWER = 11            # parent image has newer version
+_TRIM_PARENT_OLDER = 12            # parent image has older version
+_TRIM_PARENT_PUB = 13              # parent image has different publisher
+_TRIM_PROPOSED_INC = 14            # not allowed by requested pkg incorporation
+_TRIM_PROPOSED_PUB = 15            # didn't match requested publisher
+_TRIM_PROPOSED_VER = 16            # didn't match requested version
+_TRIM_PUB_RANK = 17                # pkg from higher or lower ranked publisher
+_TRIM_PUB_STICKY = 18              # pkg publisher != installed pkg publisher
+_TRIM_REJECT = 19                  # --reject
+_TRIM_UNSUPPORTED = 20             # invalid or unsupported actions
+_TRIM_VARIANT = 21                 # unsupported variant (e.g. i386 on sparc)
+_TRIM_MAX = 22                     # number of trim constants
+
+
 class DependencyException(Exception):
         """local exception used to pass failure to match
         dependencies in packages out of nested evaluation"""
 
-        def __init__(self, reason, fmris=EmptyI):
+        def __init__(self, reason_id, reason, fmris=EmptyI):
                 Exception.__init__(self)
                 self.__fmris = fmris
+                self.__reason_id = reason_id
                 self.__reason = reason
+
         @property
         def fmris(self):
+                """The FMRIs related to the exception."""
                 return self.__fmris
+
+        @property
+        def reason_id(self):
+                """A constant indicating why the related FMRIs were rejected."""
+                return self.__reason_id
+
         @property
         def reason(self):
+                """A string describing why the related FMRIs were rejected."""
                 return self.__reason
 
 
 class PkgSolver(object):
+        """Provides a SAT-based solution solver to determine which packages
+        should be installed, updated, or removed to perform a requested
+        operation."""
 
         def __init__(self, cat, installed_dict, pub_ranks, variants, avoids,
             parent_pkgs, progtrack):
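
DependencyException now carries a machine-readable reason_id in addition to the reason text, which lets callers bucket failures by type instead of comparing message strings. A hedged sketch of that consumption pattern, with a simplified stand-in class and illustrative constants:

    from collections import defaultdict

    TRIM_DEP_MISSING = 0
    TRIM_DEP_OBSOLETE = 1

    class DependencyFailure(object):
        # Simplified stand-in for the DependencyException added above.
        def __init__(self, reason_id, reason, fmris=()):
            self.reason_id = reason_id
            self.reason = reason
            self.fmris = fmris

    def summarize(failures):
        # Count failures per reason type for later reporting.
        by_reason = defaultdict(list)
        for f in failures:
            by_reason[f.reason_id].append(f)
        return dict((rid, len(flist)) for rid, flist in by_reason.items())
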
@@ -76,12 +124,17 @@
 
                 # check if we're allowed to use the solver
                 if DebugValues["no_solver"]:
-                        raise RuntimeError, "no_solver set, but solver invoked"
+                        raise RuntimeError("no_solver set, but solver invoked")
 
                 self.__catalog = cat
+                self.__known_incs = set()       # stems with incorporate deps
                 self.__publisher = {}           # indexed by stem
                 self.__possible_dict = defaultdict(list) # indexed by stem
                 self.__pub_ranks = pub_ranks    # rank indexed by pub
+                self.__depend_ts = False        # flag used to indicate whether
+                                                # any dependencies with
+                                                # timestamps were seen; used in
+                                                # error output generation
                 self.__trim_dict = defaultdict(set) # fmris trimmed from
                                                 # consideration
 
@@ -107,7 +160,6 @@
 
                 self.__solver = pkg.solver.msat_solver()
 
-                self.__poss_set = set()         # possible fmris after assign
                 self.__progtrack = progtrack    # progress tracker
                 self.__progitem = None          # progress tracker plan item
 
@@ -132,10 +184,11 @@
                 self.__start_time = 0
                 self.__inc_list = []
                 self.__dependents = None
-                self.__root_fmris = None        # set of fmris installed in root image;
-                                                # used for origin dependencies
+                self.__root_fmris = None        # set of fmris installed in root
+                                                # image; used for origin
+                                                # dependencies
                 self.__avoid_set = avoids.copy()# set of stems we're avoiding
-                self.__obs_set = None           #
+                self.__obs_set = None           # set of obsolete stems
                 self.__reject_set = set()       # set of stems we're rejecting
 
                 # Internal cache of created fmri objects.  Used so that the same
@@ -158,24 +211,26 @@
                 self.__firmware = Firmware()
 
         def __str__(self):
-
                 s = "Solver: ["
                 if self.__state in [SOLVER_FAIL, SOLVER_SUCCESS]:
-                        s += " Variables: %d Clauses: %d Iterations: %d" % (
-                            self.__variables, self.__clauses, self.__iterations)
-                s += " State: %s]" % self.__state
+                        s += (" Variables: {0:d} Clauses: {1:d} Iterations: "
+                            "{2:d}").format(self.__variables, self.__clauses,
+                                self.__iterations)
+                s += " State: {0}]".format(self.__state)
 
                 s += "\nTimings: ["
-                s += ", ".join(["%s: %6.3f" % a for a in self.__timings])
+                s += ", ".join([
+                    "{0}: {1: 6.3f}".format(*a)
+                    for a in self.__timings
+                ])
                 s += "]"
 
                 if self.__inc_list:
-
                         incs = "\n\t".join([str(a) for a in self.__inc_list])
                 else:
                         incs = "None"
 
-                s += "\nMaintained incorporations: %s\n" % incs
+                s += "\nMaintained incorporations: {0}\n".format(incs)
 
                 return s
 
@@ -197,7 +252,6 @@
                 self.__id2fmri = None
                 self.__fmri2id = None
                 self.__solver = None
-                self.__poss_set = None
                 self.__progtrack = None
                 self.__addclause_failure = False
                 self.__variant_dict = None
@@ -219,6 +273,7 @@
                 return rval
 
         def __progress(self):
+                """Bump progress tracker to indicate processing is active."""
                 assert self.__progitem
                 self.__progtrack.plan_add_progress(self.__progitem)
 
@@ -230,16 +285,415 @@
                 if self.__subphasename is not None:
                         self.__end_subphase()
                 self.__start_time = time.time()
-                self.__subphasename = "phase %d" % subphase
+                self.__subphasename = "phase {0:d}".format(subphase)
                 self.__progress()
 
         def __end_subphase(self):
+                """Mark the end of a solver subphase, recording time taken."""
                 now = time.time()
                 self.__timings.append((self.__subphasename,
                     now - self.__start_time))
                 self.__start_time = None
                 self.__subphasename = None
 
+        def __trim_frozen(self, existing_freezes):
+                """Trim any packages we cannot update due to freezes."""
+                for f, r, _t in existing_freezes:
+                        if r:
+                                reason = (N_("This version is excluded by a "
+                                    "freeze on {0} at version {1}.  The "
+                                    "reason for the freeze is: {2}"),
+                                    (f.pkg_name,
+                                        f.version.get_short_version(), r))
+                        else:
+                                reason = (N_("This version is excluded by a "
+                                    "freeze on {0} at version {1}."),
+                                    (f.pkg_name, f.version.get_short_version()))
+                        self.__trim(self.__comb_auto_fmris(f, dotrim=False)[1],
+                            _TRIM_FREEZE, reason)
+
+        def __raise_solution_error(self, no_version=EmptyI, no_solution=EmptyI):
+                """Raise a plan exception due to solution errors."""
+
+                solver_errors = None
+                if DebugValues["plan"]:
+                        solver_errors = self.get_trim_errors()
+                raise api_errors.PlanCreationException(no_solution=no_solution,
+                    no_version=no_version, solver_errors=solver_errors)
+
+        def __trim_proposed(self, proposed_dict):
+                """Remove any versions from proposed_dict that are in trim_dict
+                and raise an exception if no matching version of a proposed
+                package can be installed at this point."""
+
+                if proposed_dict is None:
+                        # Nothing to do.
+                        return
+
+                # Used to de-dup errors.
+                already_seen = set()
+
+                ret = []
+                for name in proposed_dict:
+                        tv = self.__dotrim(proposed_dict[name])
+                        if tv:
+                                proposed_dict[name] = tv
+                                continue
+
+                        ret.extend([_("No matching version of {0} can be "
+                            "installed:").format(name)])
+                        ret.extend(self.__fmri_list_errors(proposed_dict[name],
+                            already_seen=already_seen))
+                        # continue processing and accumulate all errors
+                if ret:
+                        self.__raise_solution_error(no_version=ret)
+
+        def __set_removed_and_required_packages(self, rejected, proposed=None):
+                """Sets the list of packages to be removed from the image, the
+                list of packages to reject, the list of packages to avoid
+                during the operation, and the list of packages that must not be
+                removed from the image.
+
+                'rejected' is a set of package stems to reject.
+
+                'proposed' is an optional set of FMRI objects representing
+                packages to install or update.
+
+                Upon return:
+                  * self.__removal_fmris will contain the list of FMRIs to be
+                    removed from the image due to user request or due to past
+                    bugs that caused the wrong variant to be installed by mistake.
+
+                  * self.__reject_set will contain the set of package stems
+                    rejected by user request."""
+
+                if proposed is None:
+                        proposed = set()
+                else:
+                        # remove packages to be installed from avoid_set
+                        self.__avoid_set -= proposed
+
+                self.__removal_fmris |= set([
+                    self.__installed_dict[name]
+                    for name in rejected
+                    if name in self.__installed_dict
+                ] + [
+                    f
+                    for f in self.__installed_fmris
+                    if not self.__trim_nonmatching_variants(f)
+                ])
+
+                self.__reject_set = rejected
+
+                # trim fmris that user explicitly disallowed
+                for name in rejected:
+                        self.__trim(self.__get_catalog_fmris(name),
+                            _TRIM_REJECT,
+                            N_("This version rejected by user request"))
+
+                self.__req_pkg_names = (self.__installed_pkgs |
+                    proposed) - rejected
+                self.__req_pkg_names -= set(
+                    f.pkg_name
+                    for f in self.__removal_fmris
+                )
+
+        def __update_possible_closure(self, possible, excludes,
+            full_trim=False):
+                """Update the provided possible set of fmris with the transitive
+                closure of dependencies that can be satisfied, trimming those
+                packages that cannot be installed.
+
+                'possible' is a set of FMRI objects representing all possible
+                versions of packages to consider for the operation.
+
+                'full_trim' is an optional boolean indicating whether a full
+                trim of the dependency graph should be performed.  This is NOT
+                required for the solver to find a solution; trimming is only
+                needed to reduce the size of clauses and to provide better
+                error messages.  A full trim requires multiple passes to
+                determine whether the transitive closure of dependencies can
+                be satisfied, which greatly increases runtime, but it also
+                greatly improves error messaging for some error
+                cases.
+
+                An example of a case where full_trim will be useful (dueling
+                incorporations):
+
+                Installed:
+                  entire
+                    incorporates java-7-incorporation
+                Proposed:
+                  osnet-incorporation
+                    incorporates system/resource-mgmt/dynamic-resource-pools
+                  system/resource-mgmt/dynamic-resource-pools
+                    requires new version of java not allowed by installed
+                      java-7-incorporation"""
+
+                first = True
+                while True:
+                        tsize = len(self.__trim_dict)
+                        res = self.__generate_dependency_closure(
+                            possible, excludes=excludes, full_trim=full_trim)
+                        if first:
+                                # The first pass will return the transitive
+                                # closure of all dependencies; subsequent passes
+                                # are only done for trimming, so need to update
+                                # the possible set only on first pass.
+                                possible.update(res)
+                                first = False
+
+                        nsize = len(self.__trim_dict)
+                        if not full_trim or nsize == tsize:
+                                # Nothing more to trim.
+                                break
+
+                # Remove trimmed items from possible_set.
+                possible.difference_update(self.__trim_dict.iterkeys())
+
+        def __enforce_unique_packages(self, excludes):
+                """Constrain the solver solution so that only one version of
+                each package can be installed and generate dependency clauses
+                for possible packages."""
+
+                # Generate clauses for only one version of each package, and
+                # for dependencies for each package.  Do so for all possible
+                # fmris.
+                for name in self.__possible_dict:
+                        self.__progress()
+                        # Ensure only one version of a package is installed
+                        self.__addclauses(self.__gen_highlander_clauses(
+                            self.__possible_dict[name]))
+                        # generate dependency clauses for each pkg
+                        for fmri in self.__possible_dict[name]:
+                                for da in self.__get_dependency_actions(fmri,
+                                    excludes=excludes):
+                                        self.__addclauses(
+                                            self.__gen_dependency_clauses(fmri,
+                                            da))
+
+        def __generate_operation_clauses(self, proposed=None,
+            proposed_dict=None):
+                """Generate initial solver clauses for the proposed packages (if
+                any) and installed packages.
+
+                'proposed' is a set of FMRI objects representing packages to
+                install or update.
+
+                'proposed_dict' contains user specified FMRI objects indexed by
+                pkg_name that should be installed or updated within an image."""
+
+                assert ((proposed is None and proposed_dict is None) or
+                    (proposed is not None and proposed_dict is not None))
+
+                if proposed is None:
+                        proposed = set()
+                if proposed_dict is None:
+                        proposed_dict = EmptyDict
+
+                # Generate clauses for proposed and installed pkgs; note that we
+                # create clauses that require one of the proposed pkgs to work;
+                # this allows the possible_set to always contain the existing
+                # pkgs.
+                for name in proposed_dict:
+                        self.__progress()
+                        self.__addclauses(
+                            self.__gen_one_of_these_clauses(
+                                set(proposed_dict[name]) &
+                                set(self.__possible_dict[name])))
+
+                for name in (self.__installed_pkgs - proposed -
+                    self.__reject_set - self.__avoid_set):
+                        self.__progress()
+
+                        if (self.__installed_dict[name] in
+                            self.__removal_fmris):
+                                # we're uninstalling this package
+                                continue
+
+                        if name in self.__possible_dict:
+                                self.__addclauses(
+                                    self.__gen_one_of_these_clauses(
+                                        self.__possible_dict[name]))
+
+        def __begin_solve(self):
+                """Prepares the solver for solution creation, returning a
+                ProgressTracker object to be used for the operation."""
+
+                # Once solution has been returned or failure has occurred, a new
+                # solver must be used.
+                assert self.__state == SOLVER_INIT
+                self.__state = SOLVER_OXY
+
+                pt = self.__progtrack
+                # Check to see if we were invoked by solve_uninstall, in
+                # which case we don't want to restart what we've already
+                # started.
+                if self.__progitem is None:
+                        self.__progitem = pt.PLAN_SOLVE_SETUP
+                        pt.plan_start(pt.PLAN_SOLVE_SETUP)
+                self.__start_subphase(1, reset=True)
+
+                return pt
+
+        def __end_solve(self, solution, excludes):
+                """Returns the solution result to the caller after completing
+                all necessary solution cleanup."""
+
+                pt = self.__progtrack
+                self.__end_subphase()  # end the last subphase.
+                pt.plan_done(pt.PLAN_SOLVE_SOLVER)
+                return self.__cleanup((self.__elide_possible_renames(solution,
+                    excludes), (self.__avoid_set, self.__obs_set)))
+
+        def __assert_installed_allowed(self, proposed=None):
+                """Raises a PlanCreationException if the proposed operation
+                would require the removal of installed packages that are not
+                marked for removal by the proposed operation."""
+
+                if proposed is None:
+                        proposed = set()
+
+                # Used to de-dup errors.
+                already_seen = set()
+
+                ret = []
+                for name in (self.__installed_pkgs - proposed -
+                    self.__reject_set - self.__avoid_set):
+                        self.__progress()
+
+                        if (self.__installed_dict[name] in
+                            self.__removal_fmris):
+                                # we're uninstalling this package
+                                continue
+
+                        if name in self.__possible_dict:
+                                continue
+
+                        # no version of this package is allowed
+                        res = self.__fmri_list_errors(
+                            [self.__installed_dict[name]],
+                            already_seen=already_seen)
+
+                        # If no errors returned, that implies that all of the
+                        # reasons the FMRI was rejected aren't interesting.
+                        if res:
+                                ret.extend([_("Package {0} must be uninstalled "
+                                    "before the requested operation can be "
+                                    "performed.").format(name)])
+                                ret.extend(res)
+
+                        # continue processing and accumulate all errors
+                if ret:
+                        self.__raise_solution_error(no_version=ret)
+
+        def __assert_trim_errors(self, possible_set, excludes, proposed=None,
+            proposed_dict=None):
+                """Raises a PlanCreationException if any further trims would
+                prevent the installation or update of proposed or
+                installed/required packages.
+
+                'proposed' is an optional set of FMRI objects representing
+                packages to install or update.
+
+                'proposed_dict' contains user specified FMRIs indexed by
+                pkg_name that should be installed within an image.
+
+                'possible_set' is the set of FMRIs potentially allowed for use
+                in the proposed operation."""
+
+                # make sure all package trims appear
+                self.__trimdone = False
+
+                # First, perform a full trim of the package version space; this
+                # is normally skipped for performance reasons as it's not
+                # required for correctness.
+                self.__update_possible_closure(possible_set, excludes,
+                    full_trim=True)
+
+                # Now try re-asserting that proposed (if any) and installed
+                # packages are allowed after the trimming; these calls will
+                # raise an exception if all the proposed or any of the
+                # installed/required packages are trimmed.
+                self.__trim_proposed(proposed_dict)
+                self.__assign_possible(possible_set)
+                self.__assert_installed_allowed(proposed=proposed)
+
+        def __raise_install_error(self, exp, inc_list, proposed_dict,
+            possible_set, excludes):
+                """Private logic for solve_install() to process a
+                PlanCreationException and re-raise as appropriate.
+
+                'exp' is the related exception object raised by the solver when
+                no solution was found.
+
+                'inc_list' is a list of package FMRIs representing installed
+                incorporations that are being maintained.
+
+                'proposed_dict' contains user specified FMRIs indexed by
+                pkg_name that should be installed within an image.
+
+                'possible_set' is the set of FMRIs potentially allowed for use
+                in the proposed operation.
+                """
+
+                # Before making a guess, apply extra trimming to see if we can
+                # reject the operation based on changing packages.
+                self.__assert_trim_errors(possible_set, excludes,
+                    proposed_dict=proposed_dict)
+
+                # Despite all of the trimming done, we still don't know why the
+                # solver couldn't find a solution, so make a best effort guess
+                # at the reason why.
+                info = []
+                incs = []
+
+                incs.append("")
+                if inc_list:
+                        incs.append("maintained incorporations:")
+                        skey = operator.attrgetter('pkg_name')
+                        for il in sorted(inc_list, key=skey):
+                                incs.append("  {0}".format(il.get_short_fmri()))
+                else:
+                        incs.append("maintained incorporations: None")
+                incs.append("")
+
+                ms = self.__generate_dependency_errors([
+                    b for a in proposed_dict.values()
+                    for b in a
+                ], excludes=excludes)
+                if ms:
+                        info.append("")
+                        info.append(_("Plan Creation: dependency error(s) in "
+                            "proposed packages:"))
+                        info.append("")
+                        for s in ms:
+                                info.append("  {0}".format(s))
+
+                ms = self.__check_installed()
+                if ms:
+                        info.append("")
+                        info.append(_("Plan Creation: Errors in installed "
+                            "packages due to proposed changes:"))
+                        info.append("")
+                        for s in ms:
+                                info.append("  {0}".format(s))
+
+                if not info: # both error detection methods insufficient.
+                        info.append(_("Plan Creation: Package solver is "
+                            "unable to compute solution."))
+                        info.append(_("Dependency analysis is unable to "
+                            "determine exact cause."))
+                        info.append(_("Try specifying expected results to "
+                            "obtain more detailed error messages."))
+                        info.append(_("Include specific version of packages "
+                            "you wish installed."))
+                exp.no_solution = incs + info
+
+                if DebugValues["plan"]:
+                        exp.solver_errors = self.get_trim_errors()
+                raise exp
+
         def solve_install(self, existing_freezes, proposed_dict,
             new_variants=None, excludes=EmptyI,
             reject_set=frozenset(), trim_proposed_installed=True,
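
__update_possible_closure above re-runs dependency-closure generation until the trim dictionary stops growing when full_trim is set; only the first pass enlarges the possible set. A generic, hypothetical sketch of that fixed-point shape (expand() and the sets are stand-ins for the real closure and trim machinery):

    def closure_to_fixed_point(possible, trimmed, expand, full_trim=False):
        # expand(possible) returns newly reachable items and may add to
        # 'trimmed' as a side effect.
        first = True
        while True:
            before = len(trimmed)
            reached = expand(possible)
            if first:
                # Only the first pass grows the possible set; later passes
                # exist solely to discover additional trims.
                possible.update(reached)
                first = False
            if not full_trim or len(trimmed) == before:
                break
        # Drop whatever ended up trimmed.
        possible.difference_update(trimmed)
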
@@ -292,22 +746,9 @@
                 installed FMRIs indexed by pkg_name. Used when exact_install
                 is on."""
 
-                # Once solution has been returned or failure has occurred, a new
-                # solver must be used.
-                assert self.__state == SOLVER_INIT
-                self.__state = SOLVER_OXY
-
+                pt = self.__begin_solve()
                 proposed_pkgs = set(proposed_dict)
 
-                pt = self.__progtrack
-                # Check to see if we were invoked by solve_uninstall, in
-                # which case we don't want to restart what we've already
-                # started.
-                if self.__progitem is None:
-                        self.__progitem = pt.PLAN_SOLVE_SETUP
-                        pt.plan_start(pt.PLAN_SOLVE_SETUP)
-                self.__start_subphase(1, reset=True)
-
                 if new_variants:
                         self.__variants = new_variants
 
@@ -315,7 +756,7 @@
                         # Entire packages can be tagged with variants thereby
                         # making those packages uninstallable in certain
                         # images.  So if we're changing variants such that
-                        # some currently installed packages are becomming
+                        # some currently installed packages are becoming
                         # uninstallable add them to the removal package set.
                         #
                         for f in self.__installed_fmris:
@@ -335,33 +776,11 @@
                                 self.__publisher[name] = \
                                     proposed_dict[name][0].publisher
 
-
-                # figure out fmris to be removed from image
-                # we may have installed wrong variants by
-                # mistake due to dependencies; remove them quietly
-
-                self.__removal_fmris |= set([
-                    self.__installed_dict[name]
-                    for name in reject_set
-                    if name in self.__installed_dict
-                ] + [
-                    f
-                    for f in self.__installed_fmris
-                    if not self.__trim_nonmatching_variants(f)
-                ])
-
-                # remove packages to be installed from avoid_set
-                self.__avoid_set -= proposed_pkgs
-                self.__reject_set = reject_set
-
-                # trim fmris that user explicitly disallowed
-                for name in reject_set:
-                        self.__trim(self.__get_catalog_fmris(name),
-                            N_("This version rejected by user request"))
-
-                self.__req_pkg_names = (self.__installed_pkgs |
-                    proposed_pkgs) - reject_set
-
+                # Determine which packages are to be removed, rejected, and
+                # avoided and also determine which ones must not be removed
+                # during the operation.
+                self.__set_removed_and_required_packages(rejected=reject_set,
+                    proposed=proposed_pkgs)
                 self.__progress()
 
                 # find list of incorps we don't let change as a side
@@ -377,7 +796,6 @@
 
                 inc_list, con_lists = self.__get_installed_unbound_inc_list(
                     relax_pkgs, excludes=excludes)
-
                 self.__inc_list = inc_list
 
                 self.__start_subphase(2)
@@ -414,35 +832,32 @@
                         self.__progress()
                         self.__trim(set(self.__get_catalog_fmris(name)) -
                             set(proposed_dict[name]),
-                            N_("This version excluded by specified installation version"))
+                            _TRIM_PROPOSED_VER,
+                            N_("This version excluded by specified "
+                                "installation version"))
                         # trim packages excluded by incorps in proposed.
-                        self.__trim_recursive_incorps(proposed_dict[name], excludes)
+                        self.__trim_recursive_incorps(proposed_dict[name],
+                            excludes)
 
                 self.__start_subphase(3)
                 # now trim pkgs we cannot update due to maintained
                 # incorporations
                 for i, flist in zip(inc_list, con_lists):
                         reason = (N_("This version is excluded by installed "
-                            "incorporation {0}"), (i,))
-                        self.__trim(self.__comb_auto_fmris(i)[1], reason)
+                            "incorporation {0}"), (i.get_short_fmri(
+                                anarchy=True, include_scheme=False),))
+                        self.__trim(self.__comb_auto_fmris(i)[1],
+                            _TRIM_INSTALLED_INC, reason)
                         for f in flist:
-                                self.__trim(self.__comb_auto_fmris(f)[1],
+                                # dotrim=False here as we only want to trim
+                                # packages that don't satisfy the incorporation.
+                                self.__trim(self.__comb_auto_fmris(f,
+                                    dotrim=False)[1], _TRIM_INSTALLED_INC,
                                     reason)
 
                 self.__start_subphase(4)
                 # now trim any pkgs we cannot update due to freezes
-                for f, r, t in existing_freezes:
-                        if r:
-                                reason = (N_("This version is excluded by a "
-                                    "freeze on {0} at version {1}.  The "
-                                    "reason for the freeze is: {2}"),
-                                    (f.pkg_name, f.version, r))
-                        else:
-                                reason = (N_("This version is excluded by a "
-                                    "freeze on {0} at version {1}."),
-                                    (f.pkg_name, f.version))
-                        self.__trim(self.__comb_auto_fmris(f, dotrim=False)[1],
-                            reason)
+                self.__trim_frozen(existing_freezes)
 
                 self.__start_subphase(5)
                 # elide any proposed versions that don't match variants (arch
@@ -453,28 +868,14 @@
 
                 self.__start_subphase(6)
                 # remove any versions from proposed_dict that are in trim_dict
-                ret = []
-                for name in proposed_dict:
-                        tv = self.__dotrim(proposed_dict[name])
-                        if tv:
-                                proposed_dict[name] = tv
-                                continue
-
-                        ret.extend([_("No matching version of %s can be "
-                            "installed:") % name])
-                        ret.extend(self.__fmri_list_errors(proposed_dict[name]))
-                        # continue processing and accumulate all errors
-
-                if ret:
-                        solver_errors = None
-                        if DebugValues["plan"]:
-                                solver_errors = self.get_trim_errors()
-                        raise api_errors.PlanCreationException(
-                            no_version=ret, solver_errors=solver_errors)
+                try:
+                        self.__trim_proposed(proposed_dict)
+                except api_errors.PlanCreationException, exp:
+                        # One or more proposed packages have been rejected.
+                        self.__raise_install_error(exp, inc_list, proposed_dict,
+                            set(), excludes)
 
                 self.__start_subphase(7)
-                # build set of possible pkgs
-
                 # generate set of possible fmris
                 #
                 # ensure existing pkgs stay installed; explicitly add in
@@ -489,103 +890,58 @@
                         possible_set.update(flist)
 
                 self.__start_subphase(8)
-                possible_set.update(self.__generate_dependency_closure(
-                    possible_set, excludes=excludes))
+                # Update the set of possible fmris with the transitive closure
+                # of all dependencies.
+                self.__update_possible_closure(possible_set, excludes)
 
                 self.__start_subphase(9)
                 # trim any non-matching variants, origins or parents
                 for f in possible_set:
                         self.__progress()
-                        if self.__trim_nonmatching_parents(f, excludes,
+                        if not self.__trim_nonmatching_parents(f, excludes,
                             ignore_inst_parent_deps):
-                                if self.__trim_nonmatching_variants(f):
-                                        self.__trim_nonmatching_origins(f,
-                                            excludes, exact_install=exact_install,
-                                            installed_dict_tmp=installed_dict_tmp)
+                                continue
+                        if not self.__trim_nonmatching_variants(f):
+                                continue
+                        self.__trim_nonmatching_origins(f, excludes,
+                            exact_install=exact_install,
+                            installed_dict_tmp=installed_dict_tmp)
 
                 self.__start_subphase(10)
                 # remove all trimmed fmris from consideration
                 possible_set.difference_update(self.__trim_dict.iterkeys())
                 # remove any versions from proposed_dict that are in trim_dict
                 # as trim dict has been updated w/ missing dependencies
-                ret = []
-                for name in proposed_dict:
-                        tv = self.__dotrim(proposed_dict[name])
-                        if tv:
-                                proposed_dict[name] = tv
-                                continue
-
-                        ret.extend([_("No matching version of %s can be "
-                            "installed:") % name])
-                        ret.extend(self.__fmri_list_errors(proposed_dict[name]))
-                        # continue processing and accumulate all errors
-                if ret:
-                        solver_errors = None
-                        if DebugValues["plan"]:
-                                solver_errors = self.get_trim_errors()
-                        raise api_errors.PlanCreationException(
-                            no_version=ret, solver_errors=solver_errors)
+                try:
+                        self.__trim_proposed(proposed_dict)
+                except api_errors.PlanCreationException, exp:
+                        # One or more proposed packages have been rejected.
+                        self.__raise_install_error(exp, inc_list, proposed_dict,
+                            possible_set, excludes)
 
                 self.__start_subphase(11)
-                # generate ids, possible_dict for clause generation
+                #
+                # Generate ids, possible_dict for clause generation.  Prepare
+                # the solver for invocation.
+                #
                 self.__assign_fmri_ids(possible_set)
 
-                # generate clauses for only one version of each package, and for
-                # dependencies for each package.  Do so for all possible fmris.
-
-                for name in self.__possible_dict:
-                        self.__progress()
-                        # Ensure only one version of a package is installed
-                        self.__addclauses(self.__gen_highlander_clauses(
-                            self.__possible_dict[name]))
-                        # generate dependency clauses for each pkg
-                        for fmri in self.__possible_dict[name]:
-                                for da in self.__get_dependency_actions(fmri,
-                                    excludes=excludes):
-                                        self.__addclauses(
-                                            self.__gen_dependency_clauses(fmri,
-                                            da))
+                # Constrain the solution so that only one version of each
+                # package can be installed.
+                self.__enforce_unique_packages(excludes)
 
                 self.__start_subphase(12)
-                # generate clauses for proposed and installed pkgs
-                # note that we create clauses that require one of the
-                # proposed pkgs to work; this allows the possible_set
-                # to always contain the existing pkgs
-
-                for name in proposed_dict:
-                        self.__progress()
-                        self.__addclauses(
-                            self.__gen_one_of_these_clauses(
-                                set(proposed_dict[name]) &
-                                set(self.__possible_dict[name])))
-
-                ret = []
-                for name in self.__installed_pkgs - proposed_pkgs - \
-                    reject_set - self.__avoid_set:
-                        self.__progress()
-
-                        if (self.__installed_dict[name] in
-                            self.__removal_fmris):
-                                continue
-
-                        if name in self.__possible_dict:
-                                self.__addclauses(
-                                    self.__gen_one_of_these_clauses(
-                                        self.__possible_dict[name]))
-                                continue
-
-                        # no version of this package is allowed
-                        ret.extend([_("The installed package %s is not "
-                            "permissible.") % name])
-                        ret.extend(self.__fmri_list_errors(
-                            [self.__installed_dict[name]]))
-                        # continue processing and accumulate all errors
-                if ret:
-                        solver_errors = None
-                        if DebugValues["plan"]:
-                                solver_errors = self.get_trim_errors()
-                        raise api_errors.PlanCreationException(
-                            no_version=ret, solver_errors=solver_errors)
+                # Add proposed and installed packages to solver.
+                self.__generate_operation_clauses(proposed=proposed_pkgs,
+                    proposed_dict=proposed_dict)
+                try:
+                        self.__assert_installed_allowed(proposed=proposed_pkgs)
+                except api_errors.PlanCreationException, exp:
+                        # One or more installed packages can't be retained or
+                        # upgraded.
+                        self.__raise_install_error(exp, inc_list, proposed_dict,
+                            possible_set, excludes)
+
                 pt.plan_done(pt.PLAN_SOLVE_SETUP)
 
                 self.__progitem = pt.PLAN_SOLVE_SOLVER
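
The refactored setup above funnels clause generation through two shapes: "highlander" clauses (at most one version of a package may be installed) and "one of these" clauses (at least one member of a group must be chosen). The clause generators themselves are not shown in this changeset; the following is a generic CNF sketch of those two encodings, assuming integer variable ids where a negative id means the negated literal:

    from itertools import combinations

    def at_most_one(var_ids):
        # Pairwise exclusion: for every pair, at least one must be false.
        return [[-a, -b] for a, b in combinations(var_ids, 2)]

    def at_least_one(var_ids):
        return [list(var_ids)]

    # Example: versions 1, 2 and 3 of one package; exactly one may be chosen.
    clauses = at_most_one([1, 2, 3]) + at_least_one([1, 2, 3])
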
@@ -598,53 +954,8 @@
                         saved_solution = self.__solve()
                 except api_errors.PlanCreationException, exp:
                         # no solution can be found.
-                        # make sure all package trims appear
-                        self.__trimdone = False
-
-                        info = []
-                        incs = []
-
-                        if inc_list:
-                                incs.append("")
-                                incs.append("maintained incorporations:")
-                                incs.append("")
-                                for il in inc_list:
-                                        incs.append("  %s" % il)
-                        else:
-                                incs.append("")
-                                incs.append("maintained incorporations: None")
-                                incs.append("")
-
-                        ms = self.__generate_dependency_errors(
-                            [ b for a in proposed_dict.values() for b in a ],
-                            excludes=excludes)
-
-                        if ms:
-                                info.append("")
-                                info.append(_("Plan Creation: dependency error(s) in proposed packages:"))
-                                info.append("")
-                                for s in ms:
-                                        info.append("  %s" % s)
-                        ms = self.__check_installed()
-
-                        if ms:
-                                info.append("")
-                                info.append(_("Plan Creation: Errors in installed packages due to proposed changes:"))
-                                info.append("")
-                                for s in ms:
-                                        info.append("  %s" % s)
-                        if not info: # both error detection methods insufficent.
-                                info.append(_("Plan Creation: Package solver is unable to compute solution."))
-                                info.append(_("Dependency analysis is unable to determine exact cause."))
-                                info.append(_("Try specifying expected results to obtain more detailed error messages."))
-                                info.append(_("Include specific version of packages you wish installed."))
-                        exp.no_solution = incs + info
-
-                        solver_errors = None
-                        if DebugValues["plan"]:
-                                exp.solver_errors = self.get_trim_errors()
-                        raise exp
-
+                        self.__raise_install_error(exp, inc_list, proposed_dict,
+                            possible_set, excludes)
 
                 self.__start_subphase(14)
                 # we have a solution that works... attempt to
@@ -684,13 +995,9 @@
 
                 solution = self.__solve()
                 self.__progress()
-
                 solution = self.__update_solution_set(solution, excludes)
 
-                self.__end_subphase()  # end the last subphase.
-                pt.plan_done(pt.PLAN_SOLVE_SOLVER)
-                return self.__cleanup((self.__elide_possible_renames(solution,
-                    excludes), (self.__avoid_set, self.__obs_set)))
+                return self.__end_solve(solution, excludes)
 
         def solve_update_all(self, existing_freezes, excludes=EmptyI,
             reject_set=frozenset()):
@@ -708,40 +1015,13 @@
                 not be currently installed.)
                 """
 
-                # Once solution has been returned or failure has occurred, a new
-                # solver must be used.
-                assert self.__state == SOLVER_INIT
-                self.__state = SOLVER_OXY
-
-                pt = self.__progtrack
-                self.__progitem = pt.PLAN_SOLVE_SETUP
-                pt.plan_start(pt.PLAN_SOLVE_SETUP)
-
-                self.__start_subphase(1, reset=True)
-
-                # figure out fmris to be removed from image
-                # we may have installed wrong variants by
-                # mistake due to dependencies; remove them quietly
-
-                self.__removal_fmris = frozenset([
-                    self.__installed_dict[name]
-                    for name in reject_set
-                    if name in self.__installed_dict
-                ] + [
-                    f
-                    for f in self.__installed_fmris
-                    if not self.__trim_nonmatching_variants(f)
-                ])
-                self.__reject_set = reject_set
-
+                pt = self.__begin_solve()
+
+                # Determine which packages are to be removed, rejected, and
+                # avoided and also determine which ones must not be removed
+                # during the operation.
+                self.__set_removed_and_required_packages(rejected=reject_set)
                 self.__progress()
-                # trim fmris that user explicitly disallowed
-                for name in reject_set:
-                        self.__trim(self.__get_catalog_fmris(name),
-                            N_("This version rejected by user request"))
-
-                self.__progress()
-                self.__req_pkg_names = self.__installed_pkgs - reject_set
 
                 # trim fmris we cannot install because they're older
                 for f in self.__installed_fmris:
@@ -749,18 +1029,7 @@
                         self.__trim_older(f)
 
                 # now trim any pkgs we cannot update due to freezes
-                for f, r, t in existing_freezes:
-                        if r:
-                                reason = (N_("This version is excluded by a "
-                                    "freeze on {0} at version {1}.  The "
-                                    "reason for the freeze is: {2}"),
-                                    (f.pkg_name, f.version, r))
-                        else:
-                                reason = (N_("This version is excluded by a "
-                                    "freeze on {0} at version {1}."),
-                                    (f.pkg_name, f.version))
-                        self.__trim(self.__comb_auto_fmris(f, dotrim=False)[1],
-                            reason)
+                self.__trim_frozen(existing_freezes)
 
                 self.__start_subphase(2)
                 # generate set of possible fmris
@@ -773,8 +1042,9 @@
                         possible_set |= matching
 
                 self.__start_subphase(3)
-                possible_set.update(self.__generate_dependency_closure(
-                    possible_set, excludes=excludes))
+                # Update the set of possible FMRIs with the transitive closure
+                # of all dependencies.
+                self.__update_possible_closure(possible_set, excludes)
 
                 # trim any non-matching origins or parents
                 for f in possible_set:
@@ -782,64 +1052,45 @@
                                 if self.__trim_nonmatching_variants(f):
                                         self.__trim_nonmatching_origins(f,
                                             excludes)
-                pt.plan_done(pt.PLAN_SOLVE_SETUP)
 
                 self.__start_subphase(4)
-                self.__progitem = pt.PLAN_SOLVE_SOLVER
-                pt.plan_start(pt.PLAN_SOLVE_SOLVER)
 
                 # remove all trimmed fmris from consideration
                 possible_set.difference_update(self.__trim_dict.iterkeys())
 
-                # generate ids, possible_dict for clause generation
+                #
+                # Generate ids, possible_dict for clause generation.  Prepare
+                # the solver for invocation.
+                #
                 self.__assign_fmri_ids(possible_set)
 
-                # generate clauses for only one version of each package, and for
-                # dependencies for each package.  Do so for all possible fmris.
-
-                for name in self.__possible_dict:
-                        # Ensure only one version of a package is installed
-                        self.__addclauses(self.__gen_highlander_clauses(
-                            self.__possible_dict[name]))
-                        # generate dependency clauses for each pkg
-                        for fmri in self.__possible_dict[name]:
-                                for da in self.__get_dependency_actions(fmri,
-                                    excludes=excludes):
-                                        self.__addclauses(
-                                            self.__gen_dependency_clauses(fmri,
-                                                da))
+                # Constrain the solution so that only one version of each
+                # package can be installed.
+                self.__enforce_unique_packages(excludes)
 
                 self.__start_subphase(5)
-                # generate clauses for installed pkgs
-                ret = []
-                for name in self.__installed_pkgs - self.__avoid_set:
-                        if (self.__installed_dict[name] in
-                            self.__removal_fmris):
-                                # we're uninstalling this package
-                                continue
-
-                        if name in self.__possible_dict:
-                                self.__progress()
-                                self.__addclauses(
-                                    self.__gen_one_of_these_clauses(
-                                    self.__possible_dict[name]))
-                                continue
-
-                        # no version of this package is allowed
-                        ret.extend([_("The installed package %s is not "
-                            "permissible.") % name])
-                        ret.extend(self.__fmri_list_errors(
-                            [self.__installed_dict[name]]))
-                        # continue processing and accumulate all errors
-                if ret:
-                        solver_errors = None
-                        if DebugValues["plan"]:
-                                solver_errors = self.get_trim_errors()
-                        raise api_errors.PlanCreationException(
-                            no_version=ret, solver_errors=solver_errors)
-
+                # Add installed packages to solver.
+                self.__generate_operation_clauses()
+                try:
+                        self.__assert_installed_allowed()
+                except api_errors.PlanCreationException:
+                        # Attempt a full trim to see if we can raise a sensible
+                        # error.  If not, re-raise.
+                        self.__assert_trim_errors(possible_set, excludes)
+                        raise
+
+                pt.plan_done(pt.PLAN_SOLVE_SETUP)
+
+                self.__progitem = pt.PLAN_SOLVE_SOLVER
+                pt.plan_start(pt.PLAN_SOLVE_SOLVER)
                 self.__start_subphase(6)
-                solution = self.__solve()
+                try:
+                        solution = self.__solve()
+                except api_errors.PlanCreationException:
+                        # No solution can be found; attempt a full trim to see
+                        # if we can raise a sensible error.  If not, re-raise.
+                        self.__assert_trim_errors(possible_set, excludes)
+                        raise
 
                 self.__update_solution_set(solution, excludes)
 
@@ -847,60 +1098,67 @@
                         if self.__fmri_is_obsolete(f):
                                 solution.remove(f)
 
-                # check if we cannot upgrade (heuristic)
-                if solution == self.__installed_fmris:
-                        # no solution can be found.
-                        incorps = self.__get_installed_upgradeable_incorps(excludes)
-                        if incorps:
-                                info = []
-                                info.append(_("Plan Creation: Package solver has not found a solution to update to latest available versions."))
-                                info.append(_("This may indicate an overly constrained set of packages are installed."))
-                                info.append(" ")
-                                info.append(_("latest incorporations:"))
-                                info.append(" ")
-                                info.extend(("  %s" % f for f in incorps))
-                                ms = self.__generate_dependency_errors(incorps,
-                                    excludes=excludes)
-                                ms.extend(self.__check_installed())
-
-                                if ms:
-                                        info.append(" ")
-                                        info.append(_("The following indicates why the system cannot update to the latest version:"))
-                                        info.append(" ")
-                                        for s in ms:
-                                                info.append("  %s" % s)
-                                else:
-                                        info.append(_("Dependency analysis is unable to determine exact cause."))
-                                        info.append(_("Try specifying expected results to obtain more detailed error messages."))
-
-                                solver_errors = None
-                                if DebugValues["plan"]:
-                                        solver_errors = self.get_trim_errors()
-                                raise api_errors.PlanCreationException(
-                                    no_solution=info,
-                                    solver_errors=solver_errors)
-                self.__end_subphase()   # end last subphase
-                pt.plan_done(pt.PLAN_SOLVE_SOLVER)
-
-                return self.__cleanup((self.__elide_possible_renames(solution,
-                    excludes), (self.__avoid_set, self.__obs_set)))
+                # If solution doesn't match installed set of packages, then an
+                # upgrade solution was found (heuristic):
+                if solution != self.__installed_fmris:
+                        return self.__end_solve(solution, excludes)
+
+                incorps = self.__get_installed_upgradeable_incorps(
+                    excludes)
+                if not incorps:
+                        # If there are no installed, upgradeable incorporations,
+                        # then assume that no updates were available.
+                        return self.__end_solve(solution, excludes)
+
+                # Before making a guess, apply extra trimming to see if we can
+                # reject the operation based on changing packages.
+                self.__assert_trim_errors(possible_set, excludes)
+
+                # Despite all of the trimming done, we still don't know why the
+                # solver couldn't find a solution, so make a best-effort guess
+                # at the reason why.
+                skey = operator.attrgetter('pkg_name')
+                info = []
+                info.append(_("No solution found to update to latest available "
+                    "versions."))
+                info.append(_("This may indicate an overly constrained set of "
+                    "packages are installed."))
+                info.append(" ")
+                info.append(_("latest incorporations:"))
+                info.append(" ")
+                info.extend((
+                    "  {0}".format(f)
+                    for f in sorted(incorps, key=skey)
+                ))
+                info.append(" ")
+
+                ms = self.__generate_dependency_errors(incorps,
+                    excludes=excludes)
+                ms.extend(self.__check_installed())
+
+                if ms:
+                        info.append(_("The following indicates why the system "
+                            "cannot update to the latest version:"))
+                        info.append(" ")
+                        for s in ms:
+                                info.append("  {0}".format(s))
+                else:
+                        info.append(_("Dependency analysis is unable to "
+                            "determine the cause."))
+                        info.append(_("Try specifying expected versions to "
+                            "obtain more detailed error messages."))
+
+                self.__raise_solution_error(no_solution=info)
 
         def solve_uninstall(self, existing_freezes, uninstall_list, excludes,
             ignore_inst_parent_deps=False):
                 """Compute changes needed for uninstall"""
 
-                # Once solution has been returned or failure has occurred, a new
-                # solver must be used.
-                assert self.__state == SOLVER_INIT
-
-                pt = self.__progtrack
-                self.__progitem = pt.PLAN_SOLVE_SETUP
-                pt.plan_start(pt.PLAN_SOLVE_SETUP)
+                self.__begin_solve()
 
                 # generate list of installed pkgs w/ possible renames removed to
                 # forestall failing removal due to presence of unneeded renamed
                 # pkg
-
                 orig_installed_set = self.__installed_fmris
                 renamed_set = orig_installed_set - \
                     self.__elide_possible_renames(orig_installed_set, excludes)
@@ -920,6 +1178,7 @@
 
                 # Run it through the solver; with more complex dependencies
                 # we're going to be out of luck without it.
+                self.__state = SOLVER_INIT # reset to initial state
                 return self.solve_install(existing_freezes, {},
                     excludes=excludes, reject_set=reject_set,
                     ignore_inst_parent_deps=ignore_inst_parent_deps)
@@ -961,13 +1220,12 @@
                 return ret
 
         def __save_solver(self):
-                """Create a saved copy of the current solver state and return it"""
+                """Duplicate current current solver state and return it."""
                 return (self.__addclause_failure,
-                        pkg.solver.msat_solver(self.__solver))
+                    pkg.solver.msat_solver(self.__solver))
 
         def __restore_solver(self, solver):
                 """Set the current solver state to the previously saved one"""
-
                 self.__addclause_failure, self.__solver = solver
                 self.__iterations = 0
 
@@ -1007,11 +1265,7 @@
                         self.__addclauses([[-i for i in solution_vector]])
 
                 if not self.__iterations:
-                        solver_errors = None
-                        if DebugValues["plan"]:
-                                solver_errors = self.get_trim_errors()
-                        raise api_errors.PlanCreationException(no_solution=True,
-                            solver_errors=solver_errors)
+                        self.__raise_solution_error(no_solution=True)
 
                 self.__state = SOLVER_SUCCESS
 
@@ -1026,28 +1280,33 @@
                     if self.__solver.dereference(i)
                 ])
 
-        def __assign_fmri_ids(self, possible_set):
-                """ give a set of possible fmris, assign ids"""
+        def __assign_possible(self, possible_set):
+                """Assign __possible_dict of possible package FMRIs by pkg stem
+                and mark trimming complete."""
 
                 # generate dictionary of possible pkgs fmris by pkg stem
-
                 self.__possible_dict.clear()
-                self.__poss_set |= possible_set
 
                 for f in possible_set:
                         self.__possible_dict[f.pkg_name].append(f)
                 for name in self.__possible_dict:
                         self.__possible_dict[name].sort()
+                self.__trimdone = True
+
+        def __assign_fmri_ids(self, possible_set):
+                """ give a set of possible fmris, assign ids"""
+
+                self.__assign_possible(possible_set)
+
                 # assign clause numbers (ids) to possible pkgs
                 pkgid = 1
-                for name in sorted(self.__possible_dict.keys()):
+                for name in sorted(self.__possible_dict.iterkeys()):
                         for fmri in reversed(self.__possible_dict[name]):
                                 self.__id2fmri[pkgid] = fmri
                                 self.__fmri2id[fmri] = pkgid
                                 pkgid += 1
 
                 self.__variables = pkgid - 1
-                self.__trimdone = True
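
A minimal sketch of the id assignment performed above, using hypothetical
package stems and versions: every candidate FMRI becomes one boolean solver
variable, numbered per stem with the newest version first.

possible = {
        # hypothetical stems; each list is in ascending version order
        "shell/bash": ["bash@4.1", "bash@4.4"],
        "web/curl": ["curl@7.40", "curl@7.54"],
}
pkgid = 1
id2fmri = {}
fmri2id = {}
for name in sorted(possible):
        for fmri in reversed(possible[name]):   # newest version first
                id2fmri[pkgid] = fmri
                fmri2id[fmri] = pkgid
                pkgid += 1
# id2fmri == {1: "bash@4.4", 2: "bash@4.1", 3: "curl@7.54", 4: "curl@7.40"}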
 
         def __getid(self, fmri):
                 """Translate fmri to variable number (id)"""
@@ -1060,8 +1319,10 @@
         def __get_fmris_by_version(self, pkg_name):
                 """Cache for catalog entries; helps performance"""
                 if pkg_name not in self.__cache:
-                        self.__cache[pkg_name] = \
-                            [t for t in self.__catalog.fmris_by_version(pkg_name)]
+                        self.__cache[pkg_name] = [
+                            t
+                            for t in self.__catalog.fmris_by_version(pkg_name)
+                        ]
                 return self.__cache[pkg_name]
 
         def __get_catalog_fmris(self, pkg_name):
@@ -1071,12 +1332,12 @@
 
                 if self.__trimdone:
                         return self.__possible_dict.get(pkg_name, [])
-                else:
-                        return [
-                                f
-                                for tp in self.__get_fmris_by_version(pkg_name)
-                                for f in tp[1]
-                                ]
+
+                return [
+                    f
+                    for tp in self.__get_fmris_by_version(pkg_name)
+                    for f in tp[1]
+                ]
 
         def __comb_newer_fmris(self, fmri, dotrim=True, obsolete_ok=True):
                 """Returns tuple of set of fmris that are matched within
@@ -1102,9 +1363,9 @@
                         pass
 
                 mver = fmri.version
-                # Always use a copy; return value may be cached.
-                all_fmris = self.__get_catalog_fmris(fmri.pkg_name)[:]
-                all_fmris.reverse()
+                # Always use a copy in reverse order (so versions are in
+                # descending order); return value may be cached.
+                all_fmris = self.__get_catalog_fmris(fmri.pkg_name)[::-1]
 
                 # frozensets are used so callers don't inadvertently
                 # update these sets (which may be cached).  Iteration is
@@ -1116,7 +1377,6 @@
                 for i, f in enumerate(all_fmris):
                         if mver == f.version:
                                 last_ver = i
-                                continue
                         elif last_ver is not None:
                                 break
 
@@ -1135,6 +1395,101 @@
                 self.__cache[tp] = (matching, remaining)
                 return self.__cache[tp]
 
+        def __comb_common_noversion(self, fmri, dotrim, obsolete_ok):
+                """Implements versionless comb logic."""
+
+                all_fmris = self.__get_catalog_fmris(fmri.pkg_name)
+                matching = frozenset((
+                    f
+                    for f in all_fmris
+                    if not dotrim or f not in self.__trim_dict
+                    if obsolete_ok or not self.__fmri_is_obsolete(f)
+                ))
+                remaining = frozenset(set(all_fmris) - matching)
+                return matching, remaining
+
+        def __comb_common_version(self, fmri, dotrim, constraint, obsolete_ok):
+                """Implements versioned comb logic."""
+
+                # If using a version constraint that cares about branch (but not
+                # timestr), the fmris will have to be resorted so that the
+                # version chopping done here works as expected.  This is because
+                # version sort order is release, branch, timestr which is
+                # different than is_successor() order.  However, if the provided
+                # FMRI has a timestamp, doesn't have a branch, or we're applying
+                # a constraint that doesn't care about the branch, then we don't
+                # need to resort.
+                mver = fmri.version
+                branch_sort = not mver.timestr and mver.branch and \
+                    constraint not in (version.CONSTRAINT_NONE,
+                        version.CONSTRAINT_RELEASE,
+                        version.CONSTRAINT_RELEASE_MAJOR,
+                        version.CONSTRAINT_RELEASE_MINOR)
+
+                all_fmris = self.__get_catalog_fmris(fmri.pkg_name)
+                if branch_sort:
+                        # The first version of this attempted to perform
+                        # multiple passes to avoid the cost of sorting by
+                        # finding the last entry that matched CONSTRAINT_RELEASE
+                        # and then only resorting the slice of comb_fmris from
+                        # first_ver to last_ver, but that actually ended up
+                        # being slower because multiple passes with
+                        # is_successor() (even over a small portion of
+                        # comb_fmris) is more expensive than simply resorting
+                        # the entire list.  Ideally, we'd get the entries from
+                        # __get_catalog_fmris() in this order already which
+                        # would be faster since we'd avoid a second sort.
+                        skey = operator.attrgetter(
+                            'version.branch', 'version.release')
+                        # Always use a copy; return value may be cached.
+                        comb_fmris = sorted(all_fmris, key=skey,
+                            reverse=True)
+                else:
+                        # Always use a copy; return value may be cached.
+                        comb_fmris = all_fmris[::-1]
+
+                # Iteration is performed in descending version order with the
+                # assumption that systems are generally up-to-date so it should
+                # be faster to start at the end and look for the oldest version
+                # that matches.
+                first_ver = None
+                last_ver = None
+                for i, f in enumerate(comb_fmris):
+                        fver = f.version
+                        if ((fver.is_successor(mver, constraint=constraint) or
+                            fver == mver)):
+                                if first_ver is None:
+                                        first_ver = i
+                                last_ver = i
+                        elif last_ver is not None:
+                                break
+
+                if last_ver is not None:
+                        # Oddly enough, it's a little bit faster to iterate
+                        # through the slice of comb_fmris again and store
+                        # matches here instead of above.  Perhaps variable
+                        # scoping overhead is to blame?
+                        matching = []
+                        remaining = []
+                        for f in comb_fmris[first_ver:last_ver + 1]:
+                                if ((not dotrim or
+                                    f not in self.__trim_dict) and
+                                    (obsolete_ok or not
+                                        self.__fmri_is_obsolete(f))):
+                                        matching.append(f)
+                                else:
+                                        remaining.append(f)
+                        matching = frozenset(matching)
+                        remaining = frozenset(chain(
+                            comb_fmris[:first_ver],
+                            remaining,
+                            comb_fmris[last_ver + 1:]))
+                else:
+                        matching = frozenset()
+                        remaining = frozenset(comb_fmris)
+
+                return matching, remaining
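
A small sketch, using made-up (release, branch) pairs, of why the branch-aware
re-sort above is needed: normal version sort order groups entries by release
first, so FMRIs on the same branch are not contiguous, while the slice-based
matching loop relies on all successors forming one contiguous run.

# (release, branch) pairs in ordinary catalog (version) sort order:
catalog_order = [
        ("1.0", "175.1"), ("1.0", "175.2"),
        ("1.1", "175.1"), ("1.1", "175.2"),
]
# Re-sorted by (branch, release), descending, as done above for
# branch-sensitive constraints:
branch_order = sorted(catalog_order,
    key=lambda v: (v[1], v[0]), reverse=True)
# branch_order == [("1.1", "175.2"), ("1.0", "175.2"),
#                  ("1.1", "175.1"), ("1.0", "175.1")]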
+
         def __comb_common(self, fmri, dotrim, constraint, obsolete_ok):
                 """Underlying impl. of other comb routines"""
 
@@ -1145,62 +1500,14 @@
                 if (not self.__trimdone and dotrim) or tp not in self.__cache:
                         # use frozensets so callers don't inadvertently update
                         # these sets (which may be cached).
-                        all_fmris = self.__get_catalog_fmris(fmri.pkg_name)
-                        mver = fmri.version
-                        if not mver:
-                                matching = frozenset((
-                                    f
-                                    for f in all_fmris
-                                    if not dotrim or f not in self.__trim_dict
-                                    if obsolete_ok or not self.__fmri_is_obsolete(f)
-                                ))
-                                remaining = frozenset(set(all_fmris) - matching)
+                        if not fmri.version or not fmri.version.release:
+                                matching, remaining = \
+                                    self.__comb_common_noversion(fmri, dotrim,
+                                        obsolete_ok)
                         else:
-                                # Always use a copy; return value may be cached.
-                                all_fmris = all_fmris[:]
-                                all_fmris.reverse()
-
-                                # Iteration is performed in descending version
-                                # order with the assumption that systems are
-                                # generally up-to-date so it should be faster to
-                                # start at the end and look for the oldest
-                                # version that matches.
-                                first_ver = None
-                                last_ver = None
-                                for i, f in enumerate(all_fmris):
-                                        fver = f.version
-                                        if (fver.is_successor(mver,
-                                            constraint=constraint) or \
-                                                fver == mver):
-                                                if first_ver is None:
-                                                        first_ver = i
-                                                last_ver = i
-                                        elif last_ver is not None:
-                                                break
-
-                                if last_ver is not None:
-                                        # Oddly enough, it's a little bit faster
-                                        # to iterate through the slice of
-                                        # all_fmris again and store matches here
-                                        # instead of above.  Perhaps variable
-                                        # scoping overhead is to blame?
-                                        matching = []
-                                        remaining = []
-                                        for f in all_fmris[first_ver:last_ver + 1]:
-                                                if ((not dotrim or
-                                                    f not in self.__trim_dict) and
-                                                    (obsolete_ok or not
-                                                        self.__fmri_is_obsolete(f))):
-                                                        matching.append(f)
-                                                else:
-                                                        remaining.append(f)
-                                        matching = frozenset(matching)
-                                        remaining = frozenset(chain(remaining,
-                                            all_fmris[:first_ver],
-                                            all_fmris[last_ver + 1:]))
-                                else:
-                                        matching = frozenset()
-                                        remaining = frozenset(all_fmris)
+                                matching, remaining = \
+                                    self.__comb_common_version(fmri, dotrim,
+                                        constraint, obsolete_ok)
 
                         # if we haven't finished trimming, don't cache this
                         if not self.__trimdone:
@@ -1217,22 +1524,24 @@
                     obsolete_ok=obsolete_ok)
                 if not dotrim:
                         return older, newer
-                else:
-                        # we're going to return the older packages, so we need
-                        # to make sure that any trimmed packages are removed
-                        # from the matching set and added to the non-matching
-                        # ones.
-                        trimmed_older = set([
-                                f
-                                for f in older
-                                if f in self.__trim_dict
-                                ])
-                        return older - trimmed_older, newer | trimmed_older
+
+                # we're going to return the older packages, so we need
+                # to make sure that any trimmed packages are removed
+                # from the matching set and added to the non-matching
+                # ones.
+                trimmed_older = set([
+                    f
+                    for f in older
+                    if f in self.__trim_dict
+                ])
+                return older - trimmed_older, newer | trimmed_older
 
         def __comb_auto_fmris(self, fmri, dotrim=True, obsolete_ok=True):
                 """Returns tuple of set of fmris that are match within
-                CONSTRAINT.AUTO of specified version and set of remaining fmris."""
-                return self.__comb_common(fmri, dotrim, version.CONSTRAINT_AUTO, obsolete_ok)
+                CONSTRAINT.AUTO of specified version and set of remaining
+                fmris."""
+                return self.__comb_common(fmri, dotrim, version.CONSTRAINT_AUTO,
+                    obsolete_ok)
 
         def __fmri_loadstate(self, fmri, excludes):
                 """load fmri state (obsolete == True, renamed == True)"""
@@ -1250,7 +1559,7 @@
                         # Trim package entries that have unparseable action data
                         # so that they can be filtered out later.
                         self.__fmri_state[fmri] = ("false", "false")
-                        self.__trim(fmri, N_("Package contains invalid or unsupported actions"))
+                        self.__trim_unsupported(fmri)
                         return
 
                 self.__fmri_state[fmri] = (
@@ -1287,17 +1596,15 @@
                         ]
                         return self.__depcache[fmri]
                 except api_errors.InvalidPackageErrors:
-                        if trim_invalid:
-                                # Trim package entries that have unparseable
-                                # action data so that they can be filtered out
-                                # later.
-                                self.__fmri_state[fmri] = ("false", "false")
-                                self.__trim(fmri, N_("Package contains invalid "
-                                    "or unsupported actions"))
-                                return []
-                        else:
+                        if not trim_invalid:
                                 raise
 
+                        # Trim package entries that have unparseable action data
+                        # so that they can be filtered out later.
+                        self.__fmri_state[fmri] = ("false", "false")
+                        self.__trim_unsupported(fmri)
+                        return []
+
         def __get_variant_dict(self, fmri):
                 """Return dictionary of variants suppported by fmri"""
                 try:
@@ -1308,27 +1615,30 @@
                         # Trim package entries that have unparseable action data
                         # so that they can be filtered out later.
                         self.__variant_dict[fmri] = {}
-                        self.__trim(fmri, N_("Package contains invalid or unsupported actions"))
+                        self.__trim_unsupported(fmri)
                 return self.__variant_dict[fmri]
 
         def __generate_dependency_closure(self, fmri_set, excludes=EmptyI,
-            dotrim=True):
+            dotrim=True, full_trim=False):
                 """return set of all fmris the set of specified fmris could
                 depend on; while trimming those packages that cannot be
                 installed"""
 
-                needs_processing = fmri_set
+                # Use a copy of the set provided by the caller to prevent
+                # unexpected modification!
+                needs_processing = set(fmri_set)
                 already_processed = set()
 
-                while (needs_processing):
+                while needs_processing:
                         self.__progress()
                         fmri = needs_processing.pop()
                         already_processed.add(fmri)
                         needs_processing |= (self.__generate_dependencies(fmri,
-                            excludes, dotrim) - already_processed)
+                            excludes, dotrim, full_trim) - already_processed)
                 return already_processed
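
A minimal, runnable sketch of the worklist pattern used above, substituting a
hypothetical in-memory dependency map for __generate_dependencies():

deps = {
        "A": set(["B", "C"]),
        "B": set(["C"]),
        "C": set(),
}

def closure(roots):
        # Copy so the caller's set is not modified, mirroring the fix above.
        needs_processing = set(roots)
        already_processed = set()
        while needs_processing:
                name = needs_processing.pop()
                already_processed.add(name)
                needs_processing |= deps[name] - already_processed
        return already_processed

# closure(["A"]) == set(["A", "B", "C"])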
 
-        def __generate_dependencies(self, fmri, excludes=EmptyI, dotrim=True):
+        def __generate_dependencies(self, fmri, excludes=EmptyI, dotrim=True,
+            full_trim=False):
                 """return set of direct (possible) dependencies of this pkg;
                 trim those packages whose dependencies cannot be satisfied"""
                 try:
@@ -1336,17 +1646,24 @@
                              f
                              for da in self.__get_dependency_actions(fmri,
                                  excludes)
-                             # check most common ones first
+                             # check most common ones first; what is checked
+                             # here is a matter of optimization / messaging, not
+                             # correctness.
                              if da.attrs["type"] == "require" or
                                  da.attrs["type"] == "group" or
                                  da.attrs["type"] == "conditional" or
-                                 da.attrs["type"] == "require-any"
+                                 da.attrs["type"] == "require-any" or
+                                 (full_trim and (
+                                     da.attrs["type"] == "incorporate" or
+                                     da.attrs["type"] == "optional" or
+                                     da.attrs["type"] == "exclude"))
                              for f in self.__parse_dependency(da, fmri,
                                  dotrim, check_req=True)[1]
                         ])
 
                 except DependencyException, e:
-                        self.__trim(fmri, e.reason, e.fmris)
+                        self.__trim(fmri, e.reason_id, e.reason,
+                            fmri_adds=e.fmris)
                         return set([])
 
         def __elide_possible_renames(self, fmris, excludes=EmptyI):
@@ -1365,7 +1682,10 @@
                 if not renamed_fmris:
                         return set(fmris)
 
-                fmris_by_name = dict(((pfmri.pkg_name, pfmri) for pfmri in fmris))
+                fmris_by_name = dict(
+                    (pfmri.pkg_name, pfmri)
+                    for pfmri in fmris
+                )
 
                 # figure out which renamed fmris have dependencies; compute
                 # transitively so we can handle multiple renames
@@ -1431,8 +1751,9 @@
                         processed.add(frozenset(fmris))
                         d = self.__combine_incorps(fmris, excludes)
                         for name in d:
-                                self.__trim(d[name][1],
-                                    (N_("Excluded by proposed incorporation '{0}'"), (fmris[0].pkg_name,)))
+                                self.__trim(d[name][1], _TRIM_PROPOSED_INC,
+                                    (N_("Excluded by proposed incorporation "
+                                        "'{0}'"), (fmris[0].pkg_name,)))
                                 to_do = d[name][0]
                                 if to_do and frozenset(to_do) not in processed:
                                         work.append(list(to_do))
@@ -1448,17 +1769,19 @@
                 ]
                 # The following ignores constraints that appear in only some of
                 # the versions.  This also handles obsoletions & renames.
-                all_keys = reduce(set.intersection, (set(d.keys()) for d in dict_list))
+                all_keys = reduce(set.intersection,
+                    (set(d.keys()) for d in dict_list))
 
                 return dict(
-                        (k,
-                         (reduce(set.union,
-                                 (d.get(k, (set(), set()))[0]
-                                  for d in dict_list)),
-                          reduce(set.intersection,
-                                 (d.get(k, (set(), set()))[1]
-                                  for d in dict_list))))
-                        for k in all_keys)
+                    (k,
+                     (reduce(set.union,
+                         (d.get(k, (set(), set()))[0]
+                          for d in dict_list)),
+                      reduce(set.intersection,
+                         (d.get(k, (set(), set()))[1]
+                          for d in dict_list))))
+                    for k in all_keys
+                )
 
 
         def __get_incorp_nonmatch_dict(self, fmri, excludes):
@@ -1472,7 +1795,7 @@
                     excludes=excludes):
                         if da.attrs["type"] != "incorporate":
                                 continue
-                        nm, m, c, d, r, f = self.__parse_dependency(da, fmri,
+                        nm, m, _c, _d, _r, f = self.__parse_dependency(da, fmri,
                             dotrim=False)
                         # Collect all incorp. dependencies affecting
                         # a package in a list.  Note that it is
@@ -1492,9 +1815,8 @@
                             reduce(set.union, ret[pkg_name][1]))
                 return ret
 
-        def __parse_dependency(self, dependency_action, fmri,
+        def __parse_dependency(self, dependency_action, source,
             dotrim=True, check_req=False):
-
                 """Return tuple of (disallowed fmri list, allowed fmri list,
                 conditional_list, dependency_type, required)"""
 
@@ -1506,17 +1828,29 @@
                         except KeyError:
                                 fmri = pkg.fmri.PkgFmri(fmristr, "5.11")
                                 self.__fmridict[fmristr] = fmri
+
+                        if not self.__depend_ts:
+                                fver = fmri.version
+                                if fver and fver.timestr:
+                                        # Include timestamp in all error
+                                        # output for dependencies.
+                                        self.__depend_ts = True
+
                         fmris.append(fmri)
 
                 fmri = fmris[0]
 
-                required = True     # true if match is required for containing pkg
-                conditional = None  # if this dependency has conditional fmris
-                obsolete_ok = False # true if obsolete pkgs satisfy this dependency
+                # true if match is required for containing pkg
+                required = True
+                # if this dependency has conditional fmris
+                conditional = None
+                # true if obsolete pkgs satisfy this dependency
+                obsolete_ok = False
 
                 if dtype == "require":
                         matching, nonmatching = \
-                            self.__comb_newer_fmris(fmri, dotrim, obsolete_ok=obsolete_ok)
+                            self.__comb_newer_fmris(fmri, dotrim,
+                                obsolete_ok=obsolete_ok)
 
                 elif dtype == "optional":
                         obsolete_ok = True
@@ -1541,6 +1875,8 @@
                             obsolete_ok=obsolete_ok)
                         if fmri.pkg_name not in self.__req_pkg_names:
                                 required = False
+                        # Track packages that deliver incorporate deps.
+                        self.__known_incs.add(source.pkg_name)
 
                 elif dtype == "conditional":
                         cond_fmri = pkg.fmri.PkgFmri(
@@ -1548,12 +1884,12 @@
                         conditional, nonmatching = self.__comb_newer_fmris(
                             cond_fmri, dotrim, obsolete_ok=obsolete_ok)
                         # Required is only really helpful for solver error
-                        # messaging.  At this point in time, there isn't enough
-                        # information to determine whether the dependency will
-                        # be required or not, so setting this to True leads to
-                        # false positives for error conditions.  As such, this
-                        # should always be False for now.
-                        required = False
+                        # messaging.  The only time we know that this dependency
+                        # is required is when the predicate package must be part
+                        # of the solution.
+                        if cond_fmri.pkg_name not in self.__req_pkg_names:
+                                required = False
+
                         matching, nonmatching = \
                             self.__comb_newer_fmris(fmri, dotrim,
                             obsolete_ok=obsolete_ok)
@@ -1599,31 +1935,61 @@
                                 required = False
                                 matching = nonmatching = frozenset()
                         else:
-                                matching, nonmatching = self.__comb_newer_fmris(fmri,
-                                    dotrim, obsolete_ok=obsolete_ok)
+                                matching, nonmatching = self.__comb_newer_fmris(
+                                    fmri, dotrim, obsolete_ok=obsolete_ok)
 
                 else: # only way this happens is if new type is incomplete
                         raise api_errors.InvalidPackageErrors(
-                            "Unknown dependency type %s" % dtype)
+                            "Unknown dependency type {0}".format(dtype))
 
                 # check if we're throwing exceptions and we didn't find any
                 # matches on a required package
-
                 if not check_req or matching or not required:
-                        return nonmatching, matching, conditional, dtype, required, fmri
+                        return (nonmatching, matching, conditional, dtype,
+                            required, fmri)
+
+                # Neither build nor publisher is interesting for dependencies.
+                fstr = fmri.get_fmri(anarchy=True, include_scheme=False)
 
                 # we're going to toss an exception
                 if dtype == "exclude":
+                        # If we reach this point, we know that a required
+                        # package (already installed or proposed) was excluded.
                         matching, nonmatching = self.__comb_older_fmris(
                             fmri, dotrim=False, obsolete_ok=False)
-                        if not matching:
+
+                        # Determine if excluded package is already installed.
+                        installed = False
+                        for f in nonmatching:
+                                if f in self.__installed_fmris:
+                                        installed = True
+                                        break
+
+                        if not matching and installed:
+                                # The exclude dependency doesn't allow the
+                                # version of the package that is already
+                                # installed.
                                 raise DependencyException(
-                                    (N_("Package contains 'exclude' dependency {0} on installed package"),
-                                    (fmri,)))
+                                    _TRIM_INSTALLED_EXCLUDE,
+                                    (N_("Package contains 'exclude' dependency "
+                                        "{0} on installed package"), (fstr,)))
+                        elif not matching and not installed:
+                                # The exclude dependency doesn't allow any
+                                # version of the package that is proposed.
+                                raise DependencyException(
+                                    _TRIM_INSTALLED_EXCLUDE,
+                                    (N_("Package contains 'exclude' dependency "
+                                        "{0} on proposed package"), (fstr,)))
                         else:
+                                # All versions of the package allowed by the
+                                # exclude dependency were trimmed by other
+                                # dependencies.  If this message changes,
+                                # update the _TRIM_DEP_TRIMMED handling in
+                                # __fmri_errors.
                                 raise DependencyException(
-                                    (N_("All versions matching 'exclude' dependency {0} are rejected"),
-                                    (fmri,)), matching)
+                                    _TRIM_DEP_TRIMMED,
+                                    (N_("No version allowed by 'exclude' "
+                                        "dependency {0} could be installed"),
+                                        (fstr,)), matching)
                         # not reached
                 elif dtype == "incorporate":
                         matching, nonmatching = \
@@ -1649,12 +2015,21 @@
                         if matching:
                                 if len(fmris) == 1:
                                         raise DependencyException(
-                                            (N_("All acceptable versions of '{0}' dependency on {1} are obsolete"),
-                                            (dtype, fmri)))
+                                            _TRIM_DEP_OBSOLETE,
+                                            (N_("All acceptable versions of "
+                                                "'{0}' dependency on {1} are "
+                                                "obsolete"), (dtype, fstr)))
                                 else:
+                                        sfmris = [
+                                            f.get_fmri(anarchy=True,
+                                                include_scheme=False)
+                                            for f in fmris
+                                        ]
                                         raise DependencyException(
-                                            (N_("All acceptable versions of '{0}' dependencies on {1} are obsolete"),
-                                            (dtype, fmris)))
+                                            _TRIM_DEP_OBSOLETE,
+                                            (N_("All acceptable versions of "
+                                                "'{0}' dependencies on {1} are "
+                                                "obsolete"), (dtype, sfmris)))
                         # something else is wrong
                         matching, nonmatching = self.__comb_newer_fmris(fmri,
                             dotrim=False, obsolete_ok=obsolete_ok)
@@ -1664,14 +2039,15 @@
                             dotrim=False, obsolete_ok=obsolete_ok)
 
                 if not matching:
-                        raise DependencyException(
-                            (N_("A version for '{0}' dependency on {1} cannot be found"),
-                            (dtype, fmri)))
+                        raise DependencyException(_TRIM_DEP_MISSING,
+                            (N_("No version for '{0}' dependency on {1} can "
+                                "be found"), (dtype, fstr)))
                 else:
-                        raise DependencyException(
-                            (N_("All versions matching '{0}' dependency {1} are rejected"),
-                            (dtype, fmri)),
-                            matching)
+                        # If this message changes, update the
+                        # _TRIM_DEP_TRIMMED handling in __fmri_errors.
+                        raise DependencyException(_TRIM_DEP_TRIMMED,
+                            (N_("No version matching '{0}' dependency {1} "
+                                "can be installed"),
+                                (dtype, fstr)), matching)
 
         def __relax_parent_self_constrained(self, excludes, \
             ignore_inst_parent_deps):
@@ -1710,11 +2086,12 @@
 
                 needs_processing = set(fmri_list)
                 already_processed = set()
+                already_seen = set()
 
                 while needs_processing:
                         fmri = needs_processing.pop()
                         errors, newfmris = self.__do_error_work(fmri,
-                            excludes)
+                            excludes, already_seen)
                         ret.extend(errors)
                         already_processed.add(fmri)
                         needs_processing |= newfmris - already_processed
@@ -1730,109 +2107,229 @@
                 assert DebugValues["plan"]
 
                 return self.__fmri_list_errors(self.__trim_dict.iterkeys(),
-                    already_seen=set())
+                    already_seen=set(), verbose=True)
 
         def __check_installed(self):
                 """Generate list of strings describing why currently
                 installed packages cannot be installed, or empty list"""
+
+                # Used to de-dup errors.
+                already_seen = set()
+
                 ret = []
                 for f in self.__installed_fmris - self.__removal_fmris:
-                        matching, nonmatching = \
-                            self.__comb_newer_fmris(f, dotrim=True, obsolete_ok=True)
+                        matching = self.__comb_newer_fmris(f, dotrim=True,
+                            obsolete_ok=True)[0]
                         if matching:
                                 continue
-                        # there are no matches when disallowed packages are excluded
-                        matching, nonmatching = \
-                            self.__comb_newer_fmris(f, dotrim=False, obsolete_ok=True)
-
-                        ret.append(_("No suitable version of installed package %s found") % f)
-                        ret.extend(self.__fmri_list_errors(matching))
+                        # no matches when disallowed packages are excluded
+                        matching = self.__comb_newer_fmris(f, dotrim=False,
+                            obsolete_ok=True)[0]
+
+                        ret.append(_("No suitable version of installed package "
+                            "{0} found").format(f.pkg_name))
+                        ret.extend(self.__fmri_list_errors(matching,
+                            already_seen=already_seen))
 
                 return ret
 
-        def __fmri_list_errors(self, fmri_list, indent="", already_seen=None):
-                """Given a list of fmris, return indented strings why they don't work"""
+        def __fmri_list_errors(self, fmri_list, indent="", already_seen=None,
+            omit=None, verbose=False):
+                """Given a list of FMRIs, return indented strings indicating why
+                they were rejected."""
                 ret = []
 
-                fmri_reasons = [
-                        self.__fmri_errors(f, indent, already_seen)
-                        for f in sorted(fmri_list)
-                        ]
+                if omit is None:
+                        omit = set()
+
+                fmri_reasons = []
+                skey = operator.attrgetter('pkg_name')
+                for f in sorted(fmri_list, key=skey):
+                        res = self.__fmri_errors(f, indent,
+                            already_seen=already_seen, omit=omit,
+                            verbose=verbose)
+                        # If None was returned, that implies that all of the
+                        # reasons the FMRI was rejected aren't interesting.
+                        if res is not None:
+                                fmri_reasons.append(res)
+
+                last_run = []
+                def collapse_fmris():
+                        """Collapse a range of FMRIs into format:
+
+                           first_fmri
+                             to
+                           last_fmri
+
+                           ...based on verbose state."""
+
+                        if last_run:
+                                indent = last_run.pop(0)
+                                if verbose or len(last_run) <= 1:
+                                        ret.extend(last_run)
+                                elif (not self.__depend_ts and
+                                    ret[-1].endswith(last_run[-1].strip())):
+                                        # If timestamps are not being displayed
+                                        # and the last FMRI is the same as the
+                                        # first in the range then we only need
+                                        # to show the first.
+                                        pass
+                                else:
+                                        ret.append(indent + "  " + _("to"))
+                                        ret.append(last_run[-1])
+                        last_run[::] = []
 
                 last_reason = None
                 for fmri_id, reason in fmri_reasons:
                         if reason == last_reason:
-                                ret.extend([" " * len(fmri_id[0]) + fmri_id[1]])
+                                indent = " " * len(fmri_id[0])
+                                if not last_run:
+                                        last_run.append(indent)
+                                last_run.append(indent + fmri_id[1])
                                 continue
                         else: # ends run
+                                collapse_fmris()
                                 if last_reason:
                                         ret.extend(last_reason)
-                                ret.extend([fmri_id[0] + fmri_id[1]])
+                                ret.append(fmri_id[0] + fmri_id[1])
                                 last_reason = reason
                 if last_reason:
+                        collapse_fmris()
                         ret.extend(last_reason)
                 return ret
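
For illustration only, with a hypothetical package and reason, the collapsed
output produced above reports a run of consecutive versions rejected for the
same reason as a first/to/last range rather than one line per version
(spacing approximate):

collapsed = "\n".join([
        "  Reject:  pkg0@1.1",
        "             to",
        "           pkg0@1.5",
        "  Reason:  Excluded by proposed incorporation 'myincorp'",
])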
 
-        def __fmri_errors(self, fmri, indent="", already_seen=None):
-                """return a list of strings w/ indents why this fmri is not suitable"""
+        def __fmri_errors(self, fmri, indent="", already_seen=None,
+            omit=None, verbose=False):
+                """return a list of strings w/ indents why this fmri is not
+                suitable"""
 
                 if already_seen is None:
                         already_seen = set()
-
-                fmri_id = [_("%s  Reject:  ") % indent, str(fmri)]
+                if omit is None:
+                        omit = set()
+
+                fmri_id = [_("{0}  Reject:  ").format(indent)]
+                if not verbose and not self.__depend_ts:
+                        # Exclude build and timestamp for brevity.
+                        fmri_id.append(fmri.get_short_fmri())
+                else:
+                        # Include timestamp for clarity if any dependency
+                        # included a timestamp; exclude build for brevity.
+                        fmri_id.append(fmri.get_fmri())
 
                 tag = _("Reason:")
 
                 if fmri in already_seen:
+                        if fmri in omit:
+                                return
+
                         # note to translators: 'indent' will be a series of
                         # whitespaces.
-                        reason = _("%(indent)s  %(tag)s  [already rejected; "
-                            "see above]") % {"indent": indent, "tag": tag}
+                        reason = _("{indent}  {tag}  [already rejected; see "
+                            "above]").format(indent=indent, tag=tag)
                         return fmri_id, [reason]
 
                 already_seen.add(fmri)
 
+                if not verbose:
+                        # By default, omit packages from errors that were
+                        # rejected due to a newer version being installed, or
+                        # because they didn't match user-specified input.  It's
+                        # tempting to omit _TRIM_REJECT here as well, but that
+                        # leads to some very mysterious errors for
+                        # administrators if the only reason an operation failed
+                        # is because a required dependency was rejected.
+                        for reason_id, reason_t, fmris in \
+                            self.__trim_dict[fmri]:
+                                if reason_id in (_TRIM_INSTALLED_NEWER,
+                                    _TRIM_PROPOSED_PUB, _TRIM_PROPOSED_VER):
+                                        omit.add(fmri)
+                                        return
+
                 ms = []
-
-                for reason_t, fmris in sorted(self.__trim_dict[fmri]):
+                for reason_id, reason_t, fmris in sorted(
+                    self.__trim_dict[fmri]):
                         if isinstance(reason_t, tuple):
                                 reason = _(reason_t[0]).format(*reason_t[1])
                         else:
                                 reason = _(reason_t)
-                        ms.append("%s  %s  %s" % (indent, tag, reason))
-                        tag = " " * len(tag)
-                        ms.extend(self.__fmri_list_errors([
-                            f
-                            for f in fmris
-                            if f not in already_seen
-                            ], indent + "  ", already_seen))
+
+                        ms.append("{0}  {1}  {2}".format(indent, tag, reason))
+
+                        if reason in already_seen:
+                                # If we've already explained why something was
+                                # rejected before, skip it.
+                                continue
+
+                        # Use the reason text and not the id, as the text is
+                        # specific to a particular rejection.
+                        already_seen.add(reason)
+
+                        # By default, don't include error output for
+                        # dependencies on incorporation packages that don't
+                        # specify a version since any version-specific
+                        # dependencies will have caused a rejection elsewhere.
+                        if (not verbose and
+                            reason_id == _TRIM_DEP_TRIMMED and
+                            len(reason_t[1]) == 2):
+                                dtype, fstr = reason_t[1]
+                                if dtype == "require" and "@" not in fstr:
+                                        # Assumes fstr does not include
+                                        # publisher or scheme.
+                                        if fstr in self.__known_incs:
+                                                continue
+
+                        # Add the reasons why each package version that
+                        # satisfied a dependency was rejected.
+                        res = self.__fmri_list_errors([
+                                f
+                                for f in sorted(fmris)
+                                if f not in already_seen
+                                if verbose or f not in omit
+                            ],
+                            indent + "  ",
+                            already_seen=already_seen,
+                            omit=omit,
+                            verbose=verbose
+                        )
+
+                        if res:
+                                ms.append(indent + "    " + ("-" * 40))
+                                ms.extend(res)
+                                ms.append(indent + "    " + ("-" * 40))
+
                 return fmri_id, ms
 
-        def __do_error_work(self, fmri, excludes):
+        def __do_error_work(self, fmri, excludes, already_seen):
+                """Private helper function used by __generate_dependency_errors
+                to determine why packages were rejected."""
 
                 needs_processing = set()
 
                 if fmri in self.__trim_dict:
-                        return self.__fmri_list_errors([fmri]), needs_processing
+                        return self.__fmri_list_errors([fmri],
+                            already_seen=already_seen), needs_processing
 
                 for a in self.__get_dependency_actions(fmri, excludes):
                         try:
-                                match = self.__parse_dependency(a, fmri,
-                                   check_req=True)[1]
+                                matching = self.__parse_dependency(a, fmri,
+                                    check_req=True)[1]
                         except DependencyException, e:
-                                self.__trim(fmri, e.reason, e.fmris)
-                                s = _("No suitable version of required package %s found:") % fmri
-                                return [s] + self.__fmri_list_errors([fmri]), set()
-                        needs_processing |= match
+                                self.__trim(fmri, e.reason_id, e.reason,
+                                    fmri_adds=e.fmris)
+                                s = _("No suitable version of required package "
+                                    "{0} found:").format(fmri.pkg_name)
+                                return ([s] + self.__fmri_list_errors([fmri],
+                                    already_seen=already_seen),
+                                    set())
+                        needs_processing |= matching
                 return [], needs_processing
 
-
         # clause generation routines
-
         def __gen_dependency_clauses(self, fmri, da, dotrim=True):
                 """Return clauses to implement this dependency"""
-                nm, m, cond, dtype, req, depf = self.__parse_dependency(da, fmri,
-                    dotrim)
+                nm, m, cond, dtype, _req, _depf = self.__parse_dependency(da,
+                    fmri, dotrim)
 
                 if dtype == "require" or dtype == "require-any":
                         return self.__gen_require_clauses(fmri, m)
@@ -1842,14 +2339,14 @@
                         else:
                                 return self.__gen_require_clauses(fmri, m)
                 elif dtype == "conditional":
-                        return self.__gen_require_conditional_clauses(fmri, m, cond)
+                        return self.__gen_require_conditional_clauses(fmri, m,
+                            cond)
                 elif dtype in ["origin", "parent"]:
                         # handled by trimming proposed set, not by solver
                         return []
                 else:
                         return self.__gen_negation_clauses(fmri, nm)
 
-
         def __gen_highlander_clauses(self, fmri_list):
                 """Return a list of clauses that specifies only one or zero
                 of the fmris in fmri_list may be installed.  This prevents
@@ -1875,10 +2372,10 @@
                 l = len(id_list)
 
                 return [
-                        [id_list[i], id_list[j]]
-                        for i in range(l-1)
-                        for j in range(i+1, l)
-                        ]
+                    [id_list[i], id_list[j]]
+                    for i in range(l-1)
+                    for j in range(i+1, l)
+                ]
 
         def __gen_require_clauses(self, fmri, matching_fmri_list):
                 """generate clause for require dependency: if fmri is
@@ -1887,9 +2384,9 @@
                 # !a.1 | b.2 | b.3 | b.4
 
                 return [
-                        [-self.__getid(fmri)] +
-                        [self.__getid(fmri) for fmri in matching_fmri_list]
-                        ]
+                    [-self.__getid(fmri)] +
+                    [self.__getid(fmri) for fmri in matching_fmri_list]
+                ]
 
         def __gen_require_conditional_clauses(self, fmri, matching_fmri_list,
             conditional_fmri_list):
@@ -1902,9 +2399,9 @@
                 mlist = [self.__getid(f) for f in matching_fmri_list]
 
                 return [
-                        [-self.__getid(fmri)] + [-self.__getid(c)] + mlist
-                        for c in conditional_fmri_list
-                        ]
+                    [-self.__getid(fmri)] + [-self.__getid(c)] + mlist
+                    for c in conditional_fmri_list
+                ]
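
As an editorial aside (not part of the changeset), a standalone sketch of the CNF clause shapes the helpers above emit, using hypothetical solver variable ids:

    # Standalone sketch; positive ids mean "this package version is
    # installed" and the numbers below are hypothetical solver ids.
    A, B2, B3, C1 = 1, 2, 3, 4

    # require: A => B2 | B3, encoded as the single clause  !A | B2 | B3
    require_clauses = [[-A, B2, B3]]

    # conditional: (A & C1) => B2 | B3, one clause per conditional fmri:
    #   !A | !C1 | B2 | B3
    conditional_clauses = [[-A, -C1, B2, B3]]

    print(require_clauses)
    print(conditional_clauses)
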
 
         def __gen_negation_clauses(self, fmri, non_matching_fmri_list):
                 """ generate clauses for optional, incorporate and
@@ -1939,41 +2436,110 @@
                                         self.__addclause_failure = True
                                 self.__clauses += 1
                         except TypeError:
-                                e = _("List of integers, not %s, expected") % c
-                                raise TypeError, e
+                                raise TypeError(_("List of integers, not {0}, "
+                                    "expected").format(c))
+
+        def __get_child_holds(self, install_holds, pkg_cons, inc_set):
+                """Returns the set of installed packages that are incorporated
+                by packages delivering an install-hold, do not themselves
+                deliver an install-hold, but incorporate other packages.
+
+                'install_holds' is a dict mapping the stems of installed
+                packages that are not being removed to the
+                pkg.depend.install-hold value each delivers.
+
+                'pkg_cons' is a dict mapping installed package fmris to the
+                incorporate constraints they deliver.
+
+                'inc_set' is a set of packages that incorporate other
+                packages and deliver install-hold actions.  It acts as the
+                starting point from which we fan out to find "child"
+                packages that incorporate other packages."""
+
+                unprocessed = set(inc_set)
+                processed = set()
+                proc_cons = set()
+                incorps = set()
+
+                while unprocessed:
+                        self.__progress()
+                        ifmri = unprocessed.pop()
+                        processed.add(ifmri)
+
+                        if ifmri in self.__removal_fmris:
+                                # This package will be removed, so
+                                # nothing to do.
+                                continue
+
+                        cons = pkg_cons.get(ifmri, [])
+                        if cons and ifmri.pkg_name not in install_holds:
+                                # If this package incorporates other
+                                # packages and does not deliver an
+                                # install-hold, then consider it a
+                                # 'child' hold.
+                                incorps.add(ifmri)
+
+                        # Find all incorporation constraints that result
+                        # in only one possible match.  If there is only
+                        # one possible match for an incorporation
+                        # constraint then that package will not be
+                        # upgraded and should be checked for
+                        # incorporation constraints.
+                        for con in cons:
+                                if (con.pkg_name in install_holds or
+                                    con in proc_cons):
+                                        # Already handled.
+                                        continue
+                                matching = list(
+                                    self.__comb_auto_fmris(con)[0])
+                                if len(matching) == 1:
+                                        if matching[0] not in processed:
+                                                unprocessed.add(matching[0])
+                                else:
+                                        # Track which constraints have
+                                        # already been processed
+                                        # separately from which
+                                        # package FMRIs have been
+                                        # processed to avoid (unlikely)
+                                        # collision.
+                                        proc_cons.add(con)
+
+                return incorps
 
         def __get_installed_upgradeable_incorps(self, excludes=EmptyI):
-                """Return the latest version of installed upgradeable incorporations w/ install holds"""
+                """Return the latest version of installed upgradeable
+                incorporations w/ install holds"""
+
                 installed_incs = []
-
                 for f in self.__installed_fmris - self.__removal_fmris:
                         for d in self.__catalog.get_entry_actions(f,
-                            [catalog.Catalog.DEPENDENCY],
-                            excludes=excludes):
-                                if d.name == "set" and d.attrs["name"] == "pkg.depend.install-hold":
+                            [catalog.Catalog.DEPENDENCY], excludes=excludes):
+                                if (d.name == "set" and d.attrs["name"] ==
+                                    "pkg.depend.install-hold"):
                                         installed_incs.append(f)
 
                 ret = []
                 for f in installed_incs:
-                        match, unmatch = self.__comb_newer_fmris(f, dotrim=False)
-                        latest = sorted(match, reverse=True)[0]
+                        matching = self.__comb_newer_fmris(f, dotrim=False)[0]
+                        latest = sorted(matching, reverse=True)[0]
                         if latest != f:
                                 ret.append(latest)
                 return ret
 
-        def __get_installed_unbound_inc_list(self, proposed_pkgs, excludes=EmptyI):
+        def __get_installed_unbound_inc_list(self, proposed_pkgs,
+            excludes=EmptyI):
                 """Return the list of incorporations that are to not to change
-                during this install operation, and the lists of fmris they constrain."""
+                during this install operation, and the lists of fmris they
+                constrain."""
 
                 incorps = set()
                 versioned_dependents = set()
                 pkg_cons = {}
                 install_holds = {}
 
-                # determine installed packages that contain incorporation dependencies,
-                # determine those packages that are depended on by explict version,
-                # and those that have pkg.depend.install-hold values.
-
+                # Determine installed packages that contain incorporation
+                # dependencies, those packages that are depended on by explicit
+                # version, and those that have pkg.depend.install-hold values.
                 for f in self.__installed_fmris - self.__removal_fmris:
                         for d in self.__catalog.get_entry_actions(f,
                             [catalog.Catalog.DEPENDENCY],
@@ -1992,53 +2558,73 @@
                                                 fmris.append(tmp)
                                         if d.attrs["type"] == "incorporate":
                                                 incorps.add(f.pkg_name)
-                                                pkg_cons.setdefault(f, []).append(fmris[0])
-                                        for fmri in fmris:
-                                                if fmri.version is not None:
-                                                        versioned_dependents.add(fmri.pkg_name)
-                                elif d.name == "set" and d.attrs["name"] == "pkg.depend.install-hold":
-                                        install_holds[f.pkg_name] = d.attrs["value"]
-
-                # find install holds that appear on command line and are thus relaxed
+                                                pkg_cons.setdefault(f,
+                                                    []).append(fmris[0])
+                                        versioned_dependents.update(
+                                            fmri.pkg_name
+                                            for fmri in fmris
+                                            if fmri.version is not None
+                                        )
+                                elif (d.name == "set" and d.attrs["name"] ==
+                                    "pkg.depend.install-hold"):
+                                        install_holds[f.pkg_name] = \
+                                            d.attrs["value"]
+
+                # find install holds that appear on command line and are thus
+                # relaxed
                 relaxed_holds = set([
-                        install_holds[name]
-                        for name in proposed_pkgs
-                        if name in install_holds
-                        ])
-                # add any other install holds that are relaxed because they have values
-                # that start w/ the relaxed ones...
+                    install_holds[name]
+                    for name in proposed_pkgs
+                    if name in install_holds
+                ])
+
+                # add any other install holds that are relaxed because they have
+                # values that start w/ the relaxed ones...
                 relaxed_holds |= set([
-                        hold
-                        for hold in install_holds.values()
-                        if [ r for r in relaxed_holds if hold.startswith(r + ".") ]
-                        ])
-                # versioned_dependents contains all the packages that are depended on
-                # w/ a explicit version.  We now modify this list so that it does not
-                # contain any packages w/ install_holds, unless those holds were
-                # relaxed.
+                    hold
+                    for hold in install_holds.itervalues()
+                    if [ r for r in relaxed_holds if hold.startswith(r + ".") ]
+                ])
+
+                # Expand the list of install holds to include packages that
+                # are incorporated by packages delivering an install-hold,
+                # do not themselves deliver an install-hold, but incorporate
+                # other packages.
+                child_holds = self.__get_child_holds(install_holds, pkg_cons,
+                    set(inc for inc in pkg_cons
+                        if inc.pkg_name in install_holds and
+                        install_holds[inc.pkg_name] not in relaxed_holds
+                    )
+                )
+
+                for child_hold in child_holds:
+                        assert child_hold.pkg_name not in install_holds
+                        install_holds[child_hold.pkg_name] = child_hold.pkg_name
+
+                # versioned_dependents contains all the packages that are
+                # depended on w/ an explicit version.  We now modify this list so
+                # that it does not contain any packages w/ install_holds, unless
+                # those holds were relaxed.
                 versioned_dependents -= set([
                     pkg_name
                     for pkg_name, hold_value in install_holds.iteritems()
                     if hold_value not in relaxed_holds
-                    ])
+                ])
                 # Build the list of fmris that 1) contain incorp. dependencies
-                # 2) are not in the set of versioned_dependents and 3) do
-                # not explicitly appear on the install command line.
+                # 2) are not in the set of versioned_dependents and 3) do not
+                # explicitly appear on the install command line.
+                installed_dict = self.__installed_dict
                 ret = [
-                    self.__installed_dict[pkg_name]
+                    installed_dict[pkg_name]
                     for pkg_name in incorps - versioned_dependents
                     if pkg_name not in proposed_pkgs
-                    if self.__installed_dict[pkg_name] not in self.__removal_fmris
+                    if installed_dict[pkg_name] not in self.__removal_fmris
                 ]
-                # For each incorporation above that will not change, return a list
-                # of the fmris that incorporation constrains
+                # For each incorporation above that will not change, return a
+                # list of the fmris that incorporation constrains
                 con_lists = [
-                        [
-                        i
-                        for i in pkg_cons[inc]
-                        ]
-                        for inc in ret
-                        ]
+                    [ i for i in pkg_cons[inc] ]
+                    for inc in ret
+                ]
 
                 return ret, con_lists
 
@@ -2058,55 +2644,63 @@
                 self.__mark_pub_trimmed(pkg_name)
 
                 fmri_list = self.__get_catalog_fmris(pkg_name)
-                version_dict = {}
-
 
                 if pkg_name in self.__publisher:
                         acceptable_pubs = [self.__publisher[pkg_name]]
                         if pkg_name in self.__installed_dict:
-                                reason = (N_("Currently installed package '{0}' is from sticky publisher '{1}'."),
+                                reason_id = _TRIM_PUB_STICKY
+                                reason = (N_("Currently installed package "
+                                    "'{0}' is from sticky publisher '{1}'."),
                                     (pkg_name, self.__publisher[pkg_name]))
                         else:
-                                reason = N_("Package is from publisher other than specified one.")
+                                reason_id = _TRIM_PROPOSED_PUB
+                                reason = N_("Package is from publisher other "
+                                    "than specified one.")
                 else:
                         # order by pub_rank; choose highest possible tier for
-                        # pkgs; guard against unconfigured publishers in known catalog
+                        # pkgs; guard against unconfigured publishers in known
+                        # catalog
                         pubs_found = set((f.publisher for f in fmri_list))
                         ranked = sorted([
-                                        (self.__pub_ranks[p][0], p)
-                                        for p in pubs_found
-                                        if self.__pub_ranks.get(p, (0, False, False))[2]
-                                        ])
-                        acceptable_pubs = [ r[1]
-                                            for r in ranked
-                                            if r[0] == ranked[0][0]
-                                            ]
+                            (self.__pub_ranks[p][0], p)
+                            for p in pubs_found
+                            if self.__pub_ranks.get(p, (0, False, False))[2]
+                        ])
+                        acceptable_pubs = [
+                            r[1]
+                            for r in ranked
+                            if r[0] == ranked[0][0]
+                        ]
+                        reason_id = _TRIM_PUB_RANK
                         if acceptable_pubs:
-                                reason = (N_("Higher ranked publisher {0} was selected"), (acceptable_pubs[0],))
+                                reason = (N_("Higher ranked publisher {0} was "
+                                    "selected"), (acceptable_pubs[0],))
                         else:
-                                reason = N_("Package publisher is ranked lower in search order")
+                                reason = N_("Package publisher is ranked lower "
+                                    "in search order")
 
                 # allow installed packages to co-exist to meet dependency reqs.
-                # in case new publisher not proper superset of original.
-                # avoid multiple publishers w/ the exact same fmri to prevent
+                # in case new publisher not proper superset of original.  avoid
+                # multiple publishers w/ the exact same fmri to prevent
                 # thrashing in the solver due to many equiv. solutions.
                 inst_f = self.__installed_dict.get(pkg_name)
-                self.__trim((
+                self.__trim([
                     f
                     for f in fmri_list
                     if (f.publisher not in acceptable_pubs and
                             (not inst_f or f != inst_f)) or
                         (inst_f and f.publisher != inst_f.publisher and
                             f.version == inst_f.version)
-                ), reason)
+                ], reason_id, reason)
 
         # routines to manage the trim dictionary
-        # trim dictionary contains the reasons an fmri was rejected for consideration
-        # reason is a tuple of a string w/ format chars and args, or just a string.
-        # fmri_adds are any fmris that caused the rejection
-
-        def __trim(self, fmri_list, reason, fmri_adds=EmptyI):
-                """Remove specified fmri(s) from consideration for specified reason"""
+        # The trim dictionary contains the reasons an fmri was rejected for
+        # consideration.  A reason is a tuple of a string w/ format chars and
+        # args, or just a string; fmri_adds are any fmris that caused the
+        # rejection.
+
+        def __trim(self, fmri_list, reason_id, reason, fmri_adds=EmptyI):
+                """Remove specified fmri(s) from consideration for specified
+                reason."""
 
                 self.__progress()
                 try:
@@ -2114,7 +2708,8 @@
                 except TypeError:
                         it = [fmri_list]
 
-                tup = (reason, frozenset(fmri_adds))
+                assert reason_id in range(_TRIM_MAX)
+                tup = (reason_id, reason, frozenset(fmri_adds))
 
                 for fmri in it:
                         self.__trim_dict[fmri].add(tup)
@@ -2122,23 +2717,37 @@
         def __trim_older(self, fmri):
                 """Trim any fmris older than this one"""
                 reason = (N_("Newer version {0} is already installed"), (fmri,))
-                self.__trim(self.__comb_newer_fmris(fmri, dotrim=False)[1], reason)
+                self.__trim(self.__comb_newer_fmris(fmri, dotrim=False)[1],
+                    _TRIM_INSTALLED_NEWER, reason)
 
         def __trim_nonmatching_variants(self, fmri):
+                """Trim packages that don't support image architecture or other
+                image variant."""
+
                 vd = self.__get_variant_dict(fmri)
                 reason = ""
 
                 for v in self.__variants.keys():
                         if v in vd and self.__variants[v] not in vd[v]:
                                 if vd == "variant.arch":
-                                        reason = N_("Package doesn't support image architecture")
+                                        reason = N_("Package doesn't support "
+                                            "image architecture")
                                 else:
-                                        reason = (N_("Package doesn't support image variant {0}"), (v,))
-
-                                self.__trim(fmri, reason)
+                                        reason = (N_("Package supports image "
+                                            "variant {0}=[{1}] but doesn't "
+                                            "support this image's {0} ({2})"),
+                                            (v, vd[v], self.__variants[v]))
+
+                                self.__trim(fmri, _TRIM_VARIANT, reason)
                 return reason == ""
 
         def __trim_nonmatching_parents1(self, pkg_fmri, fmri):
+                """Private helper function for __trim_nonmatching_parents that
+                trims any pkg_fmri that matches a parent dependency and is
+                not installed in the parent image, is from a different
+                publisher than the parent image, or is a different version
+                than the one installed in the parent image."""
+
                 if fmri in self.__parent_pkgs:
                         # exact fmri installed in parent
                         return True
@@ -2151,7 +2760,7 @@
                         else:
                                 reason = (N_("Package {0} is not installed in "
                                     "parent image."), (fmri.pkg_name,))
-                        self.__trim(pkg_fmri, reason)
+                        self.__trim(pkg_fmri, _TRIM_PARENT_MISSING, reason)
                         return False
 
                 pf = self.__parent_dict[fmri.pkg_name]
@@ -2163,7 +2772,7 @@
                         else:
                                 reason = (N_("Package in parent is from a "
                                     "different publisher: {0}"), (pf,))
-                        self.__trim(pkg_fmri, reason)
+                        self.__trim(pkg_fmri, _TRIM_PARENT_PUB, reason)
                         return False
 
                 if pf.version == fmri.version or pf.version.is_successor(
@@ -2174,6 +2783,7 @@
                 # version mismatch
                 if pf.version.is_successor(fmri.version,
                     version.CONSTRAINT_NONE):
+                        reason_id = _TRIM_PARENT_NEWER
                         if self.__is_zone():
                                 reason = (N_("Global zone has a "
                                     "newer version: {0}"), (pf,))
@@ -2181,6 +2791,7 @@
                                 reason = (N_("Parent image has a "
                                     "newer version: {0}"), (pf,))
                 else:
+                        reason_id = _TRIM_PARENT_OLDER
                         if self.__is_zone():
                                 reason = (N_("Global zone has an older "
                                     "version of package: {0}"), (pf,))
@@ -2188,7 +2799,7 @@
                                 reason = (N_("Parent image has an older "
                                     "version of package: {0}"), (pf,))
 
-                self.__trim(pkg_fmri, reason)
+                self.__trim(pkg_fmri, reason_id, reason)
                 return False
 
         def __trim_nonmatching_parents(self, pkg_fmri, excludes,
@@ -2245,41 +2856,44 @@
                         req_fmri = pkg.fmri.PkgFmri(da.attrs["fmri"], "5.11")
 
                         if da.attrs.get("root-image", "").lower() == "true":
-                                if req_fmri.pkg_name.startswith("feature/firmware/"):
+                                if req_fmri.pkg_name.startswith(
+                                    "feature/firmware/"):
                                         # this is a firmware dependency
                                         fw_ok, reason = \
                                             self.__firmware.check_firmware(da,
                                             req_fmri.pkg_name)
                                         if not fw_ok:
-                                                self.__trim(fmri, reason)
+                                                self.__trim(fmri,
+                                                    _TRIM_FIRMWARE, reason)
                                                 return False
                                         continue
-                                else:
-                                        if self.__root_fmris is None:
-                                                img = pkg.client.image.Image(
-                                                    misc.liveroot(),
-                                                    allow_ondisk_upgrade=False,
-                                                    user_provided_dir=True,
-                                                    should_exist=True)
-                                                self.__root_fmris = dict([
-                                                    (f.pkg_name, f)
-                                                    for f in img.gen_installed_pkgs()
-                                                ])
-
-                                        installed = self.__root_fmris.get(
-                                            req_fmri.pkg_name, None)
-                                        reason = (N_("Installed version in root image "
-                                            "is too old for origin dependency {0}"),
-                                            (req_fmri,))
+                                if self.__root_fmris is None:
+                                        img = pkg.client.image.Image(
+                                            misc.liveroot(),
+                                            allow_ondisk_upgrade=False,
+                                            user_provided_dir=True,
+                                            should_exist=True)
+                                        self.__root_fmris = dict([
+                                            (f.pkg_name, f)
+                                            for f in img.gen_installed_pkgs()
+                                        ])
+
+                                installed = self.__root_fmris.get(
+                                    req_fmri.pkg_name)
+                                reason_id = _TRIM_INSTALLED_ROOT_ORIGIN
+                                reason = (N_("Installed version in root image "
+                                    "is too old for origin dependency {0}"),
+                                    (req_fmri,))
                         else:
                                 # Always use the full installed dict for origin
                                 # dependency.
                                 if exact_install:
                                         installed = installed_dict_tmp.get(
-                                            req_fmri.pkg_name, None)
+                                            req_fmri.pkg_name)
                                 else:
                                         installed = self.__installed_dict.get(
-                                            req_fmri.pkg_name, None)
+                                            req_fmri.pkg_name)
+                                reason_id = _TRIM_INSTALLED_ORIGIN
                                 reason = (N_("Installed version in image "
                                     "being upgraded is too old for origin "
                                     "dependency {0}"), (req_fmri,))
@@ -2288,27 +2902,30 @@
                         # otherwise these sorts of cross-environment
                         # dependencies don't work well
 
-                        if not installed or \
-                            not req_fmri.version or \
-                            req_fmri.version == installed.version or \
-                            installed.version.is_successor(req_fmri.version, version.CONSTRAINT_NONE):
+                        if (not installed or not req_fmri.version or
+                            req_fmri.version == installed.version or
+                            installed.version.is_successor(req_fmri.version,
+                                version.CONSTRAINT_NONE)):
                                 continue
 
-                        self.__trim(fmri, reason)
+                        self.__trim(fmri, reason_id, reason)
 
                         return False
                 return True
 
+        def __trim_unsupported(self, fmri):
+                """Indicate given package FMRI is unsupported."""
+                self.__trim(fmri, _TRIM_UNSUPPORTED,
+                    N_("Package contains invalid or unsupported actions"))
+
         def __dotrim(self, fmri_list):
                 """Return fmri_list trimmed of any fmris in self.__trim_dict"""
 
-
-                ret = [
-                        f
-                        for f in fmri_list
-                        if f not in self.__trim_dict
-                        ]
-                return ret
+                return [
+                    f
+                    for f in fmri_list
+                    if f not in self.__trim_dict
+                ]
 
         def __is_zone(self):
                 """Return True if image is a nonglobal zone"""
--- a/src/modules/fmri.py	Tue Nov 18 10:56:28 2014 +0900
+++ b/src/modules/fmri.py	Wed Nov 19 09:58:50 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 import fnmatch
@@ -317,9 +317,11 @@
                         pkg_str = "pkg://"
                 return "%s%s/%s" % (pkg_str, self.publisher, self.pkg_name)
 
-        def get_short_fmri(self, default_publisher=None, anarchy=False):
+        def get_short_fmri(self, default_publisher=None, anarchy=False,
+            include_scheme=True):
                 """Return a string representation of the FMRI without a specific
                 version."""
+                pkg_str = ""
                 publisher = self.publisher
                 if not publisher:
                         publisher = default_publisher
@@ -329,11 +331,16 @@
                 else:
                         version = "@" + self.version.get_short_version()
 
-                if not publisher or publisher.startswith(PREF_PUB_PFX) \
-                    or anarchy:
-                        return "pkg:/%s%s" % (self.pkg_name, version)
+                if (not publisher or publisher.startswith(PREF_PUB_PFX) or
+                    anarchy):
+                        if include_scheme:
+                                pkg_str = "pkg:/"
+                        return "%s%s%s" % (pkg_str, self.pkg_name, version)
 
-                return "pkg://%s/%s%s" % (publisher, self.pkg_name, version)
+                if include_scheme:
+                        pkg_str = "pkg://"
+                return "%s%s/%s%s" % (pkg_str, publisher, self.pkg_name,
+                    version)
 
         def get_fmri(self, default_publisher=None, anarchy=False,
             include_scheme=True):
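
To illustrate the new include_scheme keyword, a hypothetical interactive snippet; it assumes the patched pkg.fmri module is importable, and the exact rendering of the short version depends on the version string:

    # Illustrative only; assumes the patched pkg.fmri module is on sys.path.
    import pkg.fmri

    f = pkg.fmri.PkgFmri(
        "pkg://solaris/web/[email protected],5.11-0.175.2.0.0.42.1", "5.11")

    # Prior behaviour: scheme included, publisher shown when not preferred.
    print(f.get_short_fmri())
    # New keyword: anarchy=True drops the publisher and include_scheme=False
    # drops the "pkg:/" prefix, so messages can show just name@version.
    print(f.get_short_fmri(anarchy=True, include_scheme=False))
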
--- a/src/setup.py	Tue Nov 18 10:56:28 2014 +0900
+++ b/src/setup.py	Wed Nov 19 09:58:50 2014 +0530
@@ -328,6 +328,7 @@
         'pkg.client.__init__',
         'pkg.client.api',
         'pkg.client.linkedimage',
+        'pkg.client.pkg_solver',
         'pkg.client.pkgdefs',
         'pkg.client.pkgremote',
         'pkg.client.plandesc',
--- a/src/tests/api/t_linked_image.py	Tue Nov 18 10:56:28 2014 +0900
+++ b/src/tests/api/t_linked_image.py	Wed Nov 19 09:58:50 2014 +0530
@@ -48,7 +48,9 @@
 
 p_update_index = 0
 
-def substring_verify(string, substring):
+def pkg_err_verify(string, fmri):
+        # Ignore the package version since how it is displayed can change.
+        substring = fmri.split("@", 1)[0]
         if string.find(substring) == -1:
                 raise RuntimeError("""
 Expected "%s" to be contained in:
@@ -1038,8 +1040,8 @@
                         [self.p_foo1_name[0], self.p_foo2_name[0]])
 
                 # make sure the error message mentions both packages.
-                substring_verify(str(e), self.p_foo1_name[0])
-                substring_verify(str(e), self.p_foo2_name[0])
+                pkg_err_verify(str(e), self.p_foo1_name[0])
+                pkg_err_verify(str(e), self.p_foo2_name[0])
 
                 # try to install packages with missing parent dependencies
                 e = assertRaises(
@@ -1051,8 +1053,8 @@
                         [self.p_sync1_name[0], self.p_sync2_name[0]])
 
                 # make sure the error message mentions both packages.
-                substring_verify(str(e), self.p_sync1_name[0])
-                substring_verify(str(e), self.p_sync2_name[0])
+                pkg_err_verify(str(e), self.p_sync1_name[0])
+                pkg_err_verify(str(e), self.p_sync2_name[0])
 
                 # uninstall synced packages in the parent
                 self._api_uninstall(api_objs[0], [
@@ -1067,8 +1069,8 @@
                         api_objs[1].gen_plan_update(*args, **kwargs)))
 
                 # make sure the error message mentions both synced packages.
-                substring_verify(str(e), self.p_sync3_name[1])
-                substring_verify(str(e), self.p_sync4_name[1])
+                pkg_err_verify(str(e), self.p_sync3_name[1])
+                pkg_err_verify(str(e), self.p_sync4_name[1])
 
 
         def test_sync_nosolver(self):
--- a/src/tests/cli/t_pkg_image_update.py	Tue Nov 18 10:56:28 2014 +0900
+++ b/src/tests/cli/t_pkg_image_update.py	Wed Nov 19 09:58:50 2014 +0530
@@ -20,7 +20,7 @@
 # CDDL HEADER END
 #
 
-# Copyright (c) 2008, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2014, Oracle and/or its affiliates. All rights reserved.
 
 import testutils
 if __name__ == "__main__":
@@ -129,15 +129,57 @@
             add file %s mode=0755 owner=root group=bin path=/bin/true
             close """
 
+        # An example of dueling incorporations for an upgrade case.
+        dueling_inst = """
+            open [email protected]
+            add set name=pkg.depend.install-hold value=core-os
+            add depend fmri=consolidation/java-7/java-7-incorporation type=require
+            add depend facet.version-lock.consolidation/java-7/java-7-incorporation=true fmri=consolidation/java-7/[email protected] type=incorporate
+            add depend fmri=consolidation/java-7/[email protected] type=incorporate
+            add depend fmri=consolidation/osnet/osnet-incorporation type=require
+            add depend facet.version-lock.consolidation/osnet/osnet-incorporation=true fmri=consolidation/osnet/[email protected] type=incorporate
+            add depend fmri=consolidation/osnet/[email protected] type=incorporate
+            close
+            open consolidation/java-7/[email protected]
+            add depend fmri=runtime/java/[email protected],5.11 type=incorporate
+            close
+            open consolidation/osnet/[email protected]
+            add set name=pkg.depend.install-hold value=core-os.osnet
+            add depend fmri=pkg:/system/resource-mgmt/[email protected],5.12-5.12.0.0.0.45.25345 type=incorporate
+            close
+            open runtime/java/[email protected]
+            add depend fmri=consolidation/java-7/java-7-incorporation type=require
+            close
+            open system/resource-mgmt/[email protected]
+            add depend fmri=consolidation/osnet/osnet-incorporation type=require
+            add depend fmri=pkg:/runtime/java/[email protected] type=require
+            close
+        """
+
+        dueling_latest = """
+            open consolidation/osnet/[email protected]
+            add set name=pkg.depend.install-hold value=core-os.osnet
+            add depend fmri=pkg:/system/resource-mgmt/[email protected],5.12-5.12.0.0.0.46.25205 type=incorporate
+            close
+            open runtime/java/[email protected]
+            add depend fmri=consolidation/java-7/java-7-incorporation type=require
+            close
+            open system/resource-mgmt/[email protected],5.12-5.12.0.0.0.46.25205
+            add depend fmri=consolidation/osnet/osnet-incorporation type=require
+            add depend fmri=pkg:/runtime/java/[email protected] type=require
+            close
+        """
+
         def setUp(self):
                 # Two repositories are created for test2.
                 pkg5unittest.ManyDepotTestCase.setUp(self, ["test1", "test2",
-                    "test2", "test4", "test5"])
+                    "test2", "test4", "test5", "nightly"])
                 self.rurl1 = self.dcs[1].get_repo_url()
                 self.rurl2 = self.dcs[2].get_repo_url()
                 self.rurl3 = self.dcs[3].get_repo_url()
                 self.rurl4 = self.dcs[4].get_repo_url()
                 self.rurl5 = self.dcs[5].get_repo_url()
+                self.rurl6 = self.dcs[6].get_repo_url()
                 self.pkgsend_bulk(self.rurl1, (self.foo10, self.foo11,
                     self.baz11, self.qux10, self.qux11, self.quux10,
                     self.quux11, self.corge11, self.incorp10, self.incorp11))
@@ -153,6 +195,9 @@
                                 { "test1": "test%d" % i })
                         self.dcs[i].get_repo(auto_create=True).rebuild()
 
+                self.pkgsend_bulk(self.rurl6, (self.dueling_inst,
+                    self.dueling_latest))
+
         def test_image_update_bad_opts(self):
                 """Test update with bad options."""
 
@@ -277,7 +322,7 @@
                 self.pkg("update '*@latest'")
                 self.pkg("info [email protected] [email protected] [email protected]")
 
-        def test_bug_18536(self):
+        def test_upgrade_sticky(self):
                 """Test that when a package specified on the command line can't
                 be upgraded because of a sticky publisher, the exception raised
                 is correct."""
@@ -391,6 +436,36 @@
                 self.pkg("list %s" % elf1)
                 self.assertEqual(elf1sum, get_test_sum())
 
+        def test_dueling_incs(self):
+                """Verify that dueling incorporations don't result in a 'no
+                solution' error in a case sometimes found with 'nightly'
+                upgrades."""
+
+                self.image_create(self.rurl6)
+                self.pkg("change-facet "
+                    "version-lock.consolidation/osnet/osnet-incorporation=false")
+                self.pkg("install [email protected] "
+                    "[email protected] "
+                    "system/resource-mgmt/[email protected]")
+
+                # Failure is expected for these cases because an installed
+                # incorporation prevents the upgrade of an installed dependency
+                # required by the new packages.
+
+                # Should fail and result in 'no solution' because the user
+                # did not specify any particular packages.
+                self.pkg("update -nv", exit=1, assert_solution=False)
+                self.assert_("No solution" in self.errout)
+
+                # Should fail, but not result in 'no solution' because the
+                # user specified a particular package.
+                self.pkg("update -nv osnet-incorporation@latest", exit=1)
+                self.assert_("No matching version" in self.errout)
+
+                # Should exit with 'nothing to do' since update to new version
+                # of osnet-incorporation is not possible.
+                self.pkg("update -nv osnet-incorporation", exit=4)
+
 
 class TestPkgUpdateOverlappingPatterns(pkg5unittest.SingleDepotTestCase):
 
@@ -519,5 +594,6 @@
                 self.pkg("update '*' 'pkg://pub2/*@1' 'pkg://test/*@2'", exit=1)
                 self._api_uninstall(api_inst, ["*"])
 
+
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/cli/t_pkg_install.py	Tue Nov 18 10:56:28 2014 +0900
+++ b/src/tests/cli/t_pkg_install.py	Wed Nov 19 09:58:50 2014 +0530
@@ -5400,18 +5400,28 @@
             close
         """
 
+        exclude_group = """
+            open network/[email protected]
+            close
+            open [email protected]
+            add depend type=group fmri=network/rsync
+            close
+            open utility/[email protected]
+            add depend type=exclude fmri=network/rsync
+            close
+        """
+
         def setUp(self):
                 pkg5unittest.SingleDepotTestCase.setUp(self, image_count=2)
                 self.pkgsend_bulk(self.rurl, (self.pkg10, self.pkg20,
                     self.pkg11, self.pkg21, self.pkg30, self.pkg40, self.pkg50,
                     self.pkg505, self.pkg51, self.pkg60, self.pkg61,
                     self.bug_18653, self.pkg70, self.pkg80, self.pkg81,
-                    self.pkg90, self.pkg91, self.bug_7394_incorp,
-                    self.pkg100, self.pkg101, self.pkg102,
-                    self.pkg110, self.pkg111,
+                    self.pkg90, self.pkg91, self.bug_7394_incorp, self.pkg100,
+                    self.pkg101, self.pkg102, self.pkg110, self.pkg111,
                     self.pkg121, self.pkg122, self.pkg123, self.pkg132,
                     self.pkg142, self.pkg_nosol, self.pkg_renames,
-                    self.pkgSUNWcs075))
+                    self.pkgSUNWcs075, self.exclude_group))
 
                 self.leaf_pkgs = []
                 for t in self.leaf_expansion:
@@ -5448,6 +5458,41 @@
                 self.pkg("%s [email protected]" % install_cmd)
                 self.pkg("verify  [email protected] [email protected]")
 
+        def test_exclude_group_install(self):
+                """Verify that a simultaneous exclude and group dependency on
+                the same package is handled gracefully."""
+
+                self.image_create(self.rurl)
+
+                # These should fail (gracefully) because the my-rsync package
+                # excludes network/rsync, which is a group dependency of the
+                # gold-server package.
+                self.pkg("install network/rsync gold-server my-rsync", exit=1)
+
+                self.pkg("install network/rsync")
+                self.pkg("install gold-server my-rsync", exit=1)
+                self.pkg("uninstall '*'")
+
+                # This should succeed because network/rsync dependency is not
+                # installed.
+                self.pkg("avoid network/rsync")
+                self.pkg("install -nv gold-server my-rsync")
+
+                # This will install network/rsync and remove it from the avoid
+                # list.
+                self.pkg("install network/rsync")
+
+                # This should succeed because network/rsync will be removed and
+                # placed on avoid list as part of operation.
+                self.pkg("install --reject network/rsync gold-server my-rsync")
+
+                # Now remove gold-server and then verify install will fail.
+                self.pkg("uninstall gold-server")
+                self.pkg("unavoid network/rsync")
+                # No solution as there's no installed constraining package and
+                # user didn't provide sufficient input.
+                self.pkg("install gold-server", assert_solution=False, exit=1)
+
         def test_exclude_dependencies_install(self):
                 """ exercise exclude dependencies """
 
@@ -5733,7 +5778,8 @@
 
                 # test to see if solver will fail gracefully when no solution is
                 # possible and a require-any dependency is involved
-                self.pkg("install -vvv pkg-nosol-A pkg-nosol-E", exit=1)
+                self.pkg("install -vvv pkg-nosol-A pkg-nosol-E",
+                    assert_solution=False, exit=1)
 
                 # test to see if solver will pick one
                 self.pkg("install [email protected]")  # install pkg
@@ -5781,7 +5827,8 @@
 
                 # Test to see if solver will fail gracefully when no solution is
                 # possible and a require-any dependency is involved.
-                self.pkg("exact-install -v pkg-nosol-A pkg-nosol-E", exit=1)
+                self.pkg("exact-install -v pkg-nosol-A pkg-nosol-E",
+                    assert_solution=False, exit=1)
 
                 # Test to see if solver will pick one.
                 self.pkg("exact-install [email protected]")
@@ -7639,6 +7686,7 @@
                 self.pkg("update -v")
                 self.pkg("list inc2p2", exit=1)
 
+
 class TestObsoletionNestedIncorporations(pkg5unittest.SingleDepotTestCase):
         # Only start/stop the depot once (instead of for every test)
 
@@ -7770,6 +7818,76 @@
                 self.pkg("%s stem" % install_cmd, exit=1)
 
 
+class TestPkgInstallMultiIncorp(pkg5unittest.ManyDepotTestCase):
+        """Tests involving incorporations and multiple publishers."""
+
+        incorporated_latest = """
+            open [email protected]
+            close
+            open [email protected]
+            close"""
+
+        incorporated = """
+            open [email protected]
+            close
+            open [email protected]
+            close """
+
+        incorporates = """
+            open [email protected]
+            add depend type=incorporate [email protected]
+            close
+            open [email protected]
+            add depend type=incorporate [email protected]
+            close"""
+
+        persistent_setup = True
+
+        def setUp(self):
+                pkg5unittest.ManyDepotTestCase.setUp(self, ["test1", "test2"])
+                self.rurl1 = self.dcs[1].get_repo_url()
+                self.rurl2 = self.dcs[2].get_repo_url()
+
+        def test_1_incorp_latest_older(self):
+                """Ensure that when the newest release version of a package
+                is only available for an older branch, incorporate
+                dependencies work as expected."""
+
+                self.image_create(self.rurl1)
+                self.pkgsend_bulk(self.rurl1, (self.incorporates,
+                    self.incorporated, self.incorporated_latest))
+
+                # First, install two incorporations that intersect such that
+                # only the version before the latest branch can be installed.
+                self.pkg("install userland-incorporation vim-incorporation")
+
+                # Then, attempt to install vim; this should succeed even though
+                # the newest version available is for an older branch.
+                self.pkg("install [email protected]")
+
+        def test_2_incorp_multi_pub(self):
+                """Ensure that if another publisher offers newer packages
+                that would satisfy an incorporate dependency but are rejected
+                because of publisher selection, the preferred publisher's
+                package can still satisfy the incorporate dependency."""
+
+                self.image_create(self.rurl1)
+                self.pkgsend_bulk(self.rurl1, (self.incorporates,
+                    self.incorporated))
+                self.pkgsend_bulk(self.rurl2, self.incorporated_latest)
+
+                # First, install the incorporation.
+                self.pkg("install userland-incorporation")
+
+                # Next, add the second publisher.
+                self.pkg("set-publisher -p %s" % self.rurl2)
+
+                # Next, verify that first publisher's incorporated package can
+                # be installed since it satisfies incorporate dependencies even
+                # though second publisher's versions will be rejected.
+                self.pkg("install //test1/vim")
+
+
 class TestPkgInstallLicense(pkg5unittest.SingleDepotTestCase):
         """Tests involving one or more packages that require license acceptance
         or display."""
--- a/src/tests/cli/t_pkg_version.py	Tue Nov 18 10:56:28 2014 +0900
+++ b/src/tests/cli/t_pkg_version.py	Wed Nov 19 09:58:50 2014 +0530
@@ -20,7 +20,7 @@
 # CDDL HEADER END
 #
 
-# Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2014, Oracle and/or its affiliates. All rights reserved.
 
 import testutils
 if __name__ == "__main__":
@@ -38,9 +38,9 @@
 
                 self.image_create(self.rurl)
 
-                self.pkg("version -vq", exit=2)
-                self.pkg("version foo", exit=2)
-                self.pkg("version --", exit=2)
+                self.pkg("version -vq", use_img_root=False, exit=2)
+                self.pkg("version foo", use_img_root=False, exit=2)
+                self.pkg("version --", use_img_root=False, exit=2)
 
 
 if __name__ == "__main__":
--- a/src/tests/pkg5unittest.py	Tue Nov 18 10:56:28 2014 +0900
+++ b/src/tests/pkg5unittest.py	Wed Nov 19 09:58:50 2014 +0530
@@ -2466,7 +2466,8 @@
 
         def pkg(self, command, exit=0, comment="", prefix="", su_wrap=None,
             out=False, stderr=False, cmd_path=None, use_img_root=True,
-            debug_smf=True, env_arg=None, coverage=True, handle=False):
+            debug_smf=True, env_arg=None, coverage=True, handle=False,
+            assert_solution=True):
 
                 if isinstance(command, list):
                         cmdstr = " ".join(command)
@@ -2480,8 +2481,8 @@
 
                 cmdline.append(cmd_path)
 
-                if use_img_root and "-R" not in cmdstr and \
-                    "image-create" not in cmdstr and "version" not in cmdstr:
+                if (use_img_root and "-R" not in cmdstr and
+                    "image-create" not in cmdstr):
                         cmdline.extend(("-R", self.get_img_path()))
 
                 cmdline.extend(("-D", "plandesc_validate=1"))
@@ -2496,10 +2497,22 @@
                 else:
                         cmdline.extend(command)
 
-                return self.cmdline_run(cmdline, exit=exit, comment=comment,
+                rval = self.cmdline_run(cmdline, exit=exit, comment=comment,
                     prefix=prefix, su_wrap=su_wrap, out=out, stderr=stderr,
                     env_arg=env_arg, coverage=coverage, handle=handle)
 
+                if assert_solution:
+                        # By default, ensure the solver never failed with
+                        # 'No solution' so that tests expecting failure do
+                        # not pass for the wrong reason.
+                        for buf in (self.errout, self.output):
+                                self.assert_("No solution" not in buf,
+                                    msg="Solver could not find solution for "
+                                    "operation; set assert_solution=False if "
+                                    "this is expected when calling pkg().")
+
+                return rval
+
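
A hedged usage sketch of the new keyword from a test's point of view; the test method and package names below are hypothetical:

    # Hypothetical test method; package names are placeholders.  By default
    # pkg() now fails the test when the solver reports "No solution", so
    # only tests that legitimately expect that outcome opt out.
    def test_no_solution_expected(self):
        # Failure expected, and a "No solution" message is acceptable here.
        self.pkg("install pkgA pkgB", assert_solution=False, exit=1)

        # Failure expected for a more specific reason; the default
        # assert_solution=True still guards against an unexpected
        # "No solution" result.
        self.pkg("install pkgC@latest", exit=1)
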
         def pkg_verify(self, command, exit=0, comment="", prefix="",
             su_wrap=None, out=False, stderr=False, cmd_path=None,
             use_img_root=True, debug_smf=True, env_arg=None, coverage=True):