15743323 linked images should propagate incorporation facet version locks s12b28
authorEdward Pilatowicz <edward.pilatowicz@oracle.com>
Wed, 07 Aug 2013 15:30:45 -0700
changeset 2925 3e1fbeb31067
parent 2923 eacabfa72952
child 2926 e67313720cf4
15743323 linked images should propagate incorporation facet version locks 17272481 pkg change-{variant|facet} fast path doesn't account for --reject
doc/client_api_versions.txt
src/client.py
src/man/pkg.1
src/man/pkg.5
src/modules/actions/generic.py
src/modules/client/api.py
src/modules/client/image.py
src/modules/client/imageconfig.py
src/modules/client/imageplan.py
src/modules/client/linkedimage/common.py
src/modules/client/options.py
src/modules/client/pkg_solver.py
src/modules/client/plandesc.py
src/modules/facet.py
src/po/POTFILES.in
src/tests/api/t_linked_image.py
src/tests/cli/t_change_facet.py
src/tests/cli/t_pkg_linked.py
src/tests/cli/t_pkg_varcet.py
--- a/doc/client_api_versions.txt	Mon Aug 05 11:24:42 2013 -0700
+++ b/doc/client_api_versions.txt	Wed Aug 07 15:30:45 2013 -0700
@@ -1,3 +1,29 @@
+Version 76:
+Compatible with clients using versions 72-75 [1].
+
+    pkg.client.api.ImageInterface has changed as follows:
+
+	* The generator function 'gen_facets()' has been updated to
+	  give more information about what facets are changing.
+	  Previously a 2 value tuple was returned; now a 4 value tuple
+	  is returned.
+
+	* [1] This is actually an incompatible change to the old
+	  'gen_facets()' behavior, but since gen_facets was only
+	  introduced in the previous version of the client API it's
+	  unlikely that anyone outside of the pkg gate has adopted its
+	  use.  But since we'd like to avoid an incompatible client API
+	  change we're going to ignore this issue.
+
+	* The parsable package plan output format for facet changes has
+	  been changed.  Previously a tuple with 2 values was returned;
+	  now a tuple with 6 values is returned.
+
+	* Added 'li_parent_sync' parameter to 'gen_plan_uninstall()'.
+
+	* Added 'li_md_only' and 'li_pkg_updates' parameters to
+	  'gen_plan_detach()' and 'detach_linked_children()'.
+
 Version 75:
 Compatible with clients using versions 72-74.
 
--- a/src/client.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/client.py	Wed Aug 07 15:30:45 2013 -0700
@@ -272,7 +272,7 @@
             "            <mediator> ...")
 
         adv_usage["variant"] = _("[-Haiv] [-F format] [<variant_pattern> ...]")
-        adv_usage["facet"] = ("[-Hai] [-F format] [<facet_pattern> ...]")
+        adv_usage["facet"] = ("[-Haim] [-F format] [<facet_pattern> ...]")
         adv_usage["avoid"] = _("[pkg_fmri_pattern] ...")
         adv_usage["unavoid"] = _("[pkg_fmri_pattern] ...")
         adv_usage["freeze"] = _("[-n] [-c reason] [pkg_fmri_pattern] ...")
@@ -320,9 +320,10 @@
             "            [--prop-linked <propname>=<propvalue> ...]\n"
             "            (-c|-p) <li-name> <dir>")
         priv_usage["detach-linked"] = _(
-            "[-fnvq] [-a|-l <li-name>] [--linked-md-only]")
+            "[-fnvq] [-a|-l <li-name>] [--no-pkg-updates] [--linked-md-only]")
         priv_usage["property-linked"] = _("[-H] [-l <li-name>] [propname ...]")
-        priv_usage["audit-linked"] = _("[-a|-l <li-name>]")
+        priv_usage["audit-linked"] = _(
+            "[-H] [-a|-l <li-name>] [--no-parent-sync]")
         priv_usage["pubcheck-linked"] = ""
         priv_usage["sync-linked"] = _(
             "[-nvq] [-C n] [--accept] [--licenses] [--no-index]\n"
@@ -2053,7 +2054,8 @@
 
 def uninstall(op, api_inst, pargs,
     be_activate, backup_be, backup_be_name, be_name, new_be, li_ignore,
-    update_index, noexecute, parsable_version, quiet, verbose, stage):
+    li_parent_sync, update_index, noexecute, parsable_version, quiet,
+    verbose, stage):
         """Attempt to take package specified to DELETED state."""
 
         if not pargs:
@@ -2070,7 +2072,8 @@
             _noexecute=noexecute, _quiet=quiet, _stage=stage,
             _verbose=verbose, backup_be=backup_be,
             backup_be_name=backup_be_name, be_activate=be_activate,
-            be_name=be_name, new_be=new_be, _parsable_version=parsable_version,
+            be_name=be_name, li_parent_sync=li_parent_sync, new_be=new_be,
+            _parsable_version=parsable_version,
             pkgs_to_uninstall=pargs, update_index=update_index)
 
 def update(op, api_inst, pargs, accept, backup_be, backup_be_name, be_activate,
@@ -4855,7 +4858,7 @@
         return EXIT_OK
 
 def list_facet(op, api_inst, pargs, omit_headers, output_format, list_all_items,
-    list_installed):
+    list_masked, list_installed):
         """pkg facet [-Hai] [-F format] [<facet_pattern> ...]"""
 
         subcommand = "facet"
@@ -4873,28 +4876,40 @@
                 facet_list = api_inst.FACET_INSTALLED
 
         def gen_listing():
-                for (name, val) in api_inst.gen_facets(facet_list,
-                    patterns=req_facets):
+                for (name, val, src, masked) in \
+                    api_inst.gen_facets(facet_list, patterns=req_facets):
                         found[0] = True
 
-                        # Values here are intentionally not _().
+                        if not list_masked and masked:
+                                continue
+
+                        # "value" and "masked" are intentionally not _().
                         yield {
                             "facet": name,
-                            "value": val and "True" or "False"
+                            "value": val and "True" or "False",
+                            "src": src,
+                            "masked": masked and "True" or "False",
                         }
 
         #    FACET VALUE
-        #    <facet> <value>
-        #    <facet_2> <value_2>
+        #    <facet> <value> <src>
+        #    <facet_2> <value_2> <src2>
         #    ...
         field_data = {
-            "facet" : [("default", "json", "tsv"), _("FACET"), ""],
-            "value" : [("default", "json", "tsv"), _("VALUE"), ""],
+            "facet"  : [("default", "json", "tsv"), _("FACET"), ""],
+            "value"  : [("default", "json", "tsv"), _("VALUE"), ""],
+            "src"    : [("default", "json", "tsv"), _("SRC"), ""],
         }
-        desired_field_order = (_("FACET"), _("VALUE"))
-
-        # Default output formatting.
-        def_fmt = "%-70s %s"
+        desired_field_order = (_("FACET"), _("VALUE"), _("SRC"))
+        def_fmt = "%-64s %-5s %s"
+
+        if list_masked:
+                # if we're displaying masked facets, we should also mark which
+                # facets are masked in the output.
+                field_data["masked"] = \
+                    [("default", "json", "tsv"), _("MASKED"), ""]
+                desired_field_order += (_("MASKED"),)
+                def_fmt = "%-57s %-5s %-6s %-s"
 
         # print without trailing newline.
         sys.stdout.write(misc.get_listing(desired_field_order,
@@ -5211,8 +5226,8 @@
                         return EXIT_OOPS
         return rv
 
-def detach_linked(op, api_inst, pargs, force, li_target_all, li_target_list,
-    noexecute, quiet, verbose):
+def detach_linked(op, api_inst, pargs, force, li_md_only, li_pkg_updates,
+    li_target_all, li_target_list, noexecute, quiet, verbose):
         """pkg detach-linked
             [-fnvq] [-a|-l <li-name>] [--linked-md-only]
 
@@ -5221,11 +5236,13 @@
         if not li_target_all and not li_target_list:
                 # detach the current image
                 return __api_op(op, api_inst, _noexecute=noexecute,
-                    _quiet=quiet, _verbose=verbose, force=force)
+                    _quiet=quiet, _verbose=verbose, force=force,
+                    li_md_only=li_md_only, li_pkg_updates=li_pkg_updates)
 
         api_inst.progresstracker.set_major_phase(
             api_inst.progresstracker.PHASE_UTILITY)
         rvdict = api_inst.detach_linked_children(li_target_list, force=force,
+            li_md_only=li_md_only, li_pkg_updates=li_pkg_updates,
             noexecute=noexecute)
 
         rv, err, p_dicts = api_inst.detach_linked_rvdict2rv(rvdict)
@@ -5864,6 +5881,7 @@
     "attach_parent" :     ("p",  ""),
 
     "list_available" :    ("a",  ""),
+    "list_masked" :       ("m",  ""),
     "list_all_items" :    ("a",  ""),
     "output_format" :     ("F",  "output-format"),
 
@@ -5987,8 +6005,10 @@
 ]
 
 opts_list_facet = \
-    [opts_cb_varcet] + \
-    opts_list_varcet
+    opts_list_varcet + \
+    [
+    ("list_masked",             False),
+]
 
 opts_list_variant = \
     opts_list_varcet + \
--- a/src/man/pkg.1	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/man/pkg.1	Wed Aug 07 15:30:45 2013 -0700
@@ -130,7 +130,7 @@
 
 .LP
 .nf
-/usr/bin/pkg facet [-Hai] [-F \fIformat\fR] [\fIfacet_pattern\fR ...]
+/usr/bin/pkg facet [-Haim] [-F \fIformat\fR] [\fIfacet_pattern\fR ...]
 .fi
 
 .LP
@@ -1668,11 +1668,11 @@
 .ne 2
 .mk
 .na
-\fB\fBpkg facet\fR [\fB-Hai\fR] [\fB-F\fR \fIformat\fR] [\fIfacet_pattern\fR ...]\fR
+\fB\fBpkg facet\fR [\fB-Haim\fR] [\fB-F\fR \fIformat\fR] [\fIfacet_pattern\fR ...]\fR
 .ad
 .sp .6
 .RS 4n
-Display the current values of all facets that have been explicitly set in this image by using the \fBpkg change-facet\fR command. See "Facets and Variants" in the \fBpkg\fR(5) man page for more information about facets.
+Display the current values and source of all facets that have been explicitly set in this image by using the \fBpkg change-facet\fR command or inherited from a parent image. See "Facets and Variants" in the \fBpkg\fR(5) man page for more information about facets.
 .sp
 .ne 2
 .mk
@@ -1728,6 +1728,17 @@
 Display all facets that are listed in installed packages. This option cannot be combined with \fB-a\fR.
 .RE
 
+.sp
+.ne 2
+.mk
+.na
+\fB\fB-m\fR\fR
+.ad
+.sp .6
+.RS 4n
+Include masked facets in the output. Include a column indicating which (if any) facets are masked.
+.RE
+
 .RE
 
 .sp
--- a/src/man/pkg.5	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/man/pkg.5	Wed Aug 07 15:30:45 2013 -0700
@@ -1200,6 +1200,9 @@
 Facets are treated as boolean values by package clients: Facets can be set only to \fBtrue\fR (enabled) or \fBfalse\fR (disabled) in the image. By default, all facets are considered to be set to \fBtrue\fR in the image.
 .sp
 .LP
+Facets can either be set locally within an image or inherited from a parent image. Inherited facets are evaluated before, and hence take priority over, any local facets. If the same exact facet is both inherited and set locally, the inherited facet value masks the local value. Facet changes made via \fBpkg change-facet\fR will only affect local facets.
+.sp
+.LP
 The value of a facet tag on an action can be set to \fBall\fR or \fBtrue\fR to control how clients filter faceted actions. All values other than \fBall\fR or \fBtrue\fR have undefined behavior. See below for a description of the conditions that must exist in the image to install an action that has facet tags.
 .sp
 .LP
--- a/src/modules/actions/generic.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/modules/actions/generic.py	Wed Aug 07 15:30:45 2013 -0700
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 """module describing a generic packaging object
@@ -46,6 +46,8 @@
 import pkg.portable as portable
 import pkg.variant as variant
 
+from pkg.misc import EmptyDict
+
 # Directories must precede all filesystem object actions; hardlinks must follow
 # all filesystem object actions (except links).  Note that user and group
 # actions precede file actions (so that the system permits chown'ing them to
@@ -622,6 +624,33 @@
                     (v, self.attrs[v]) for v in self.get_varcet_keys()[0]
                 )))
 
+        def strip(self, preserve=EmptyDict):
+                """Strip actions of attributes which are unnecessary once
+                those actions have been installed in an image.  Stripped
+                actions are saved in an image's stripped action cache and used
+                for conflicting actions checks during image planning
+                operations."""
+
+                for key in self.attrs.keys():
+                        # strip out variant and facet information
+                        if key[:8] == "variant." or key[:6] == "facet.":
+                                del self.attrs[key]
+                                continue
+                        # keep unique attributes
+                        if not self.unique_attrs or key in self.unique_attrs:
+                                continue
+                        # keep file action overlay attributes
+                        if self.name == "file" and key == "overlay":
+                                continue
+                        # keep specified keys
+                        if key in preserve.get(self.name, []):
+                                continue
+                        # keep link/hardlink action mediator attributes
+                        if (self.name == "link" or self.name == "hardlink") \
+                            and key[:8] == "mediator":
+                                continue
+                        del self.attrs[key]
+
         def strip_variants(self):
                 """Remove all variant tags from the attrs dictionary."""
 
--- a/src/modules/client/api.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/modules/client/api.py	Wed Aug 07 15:30:45 2013 -0700
@@ -103,8 +103,8 @@
 # things like help(pkg.client.api.PlanDescription)
 from pkg.client.plandesc import PlanDescription # pylint: disable=W0611
 
-CURRENT_API_VERSION = 75
-COMPATIBLE_API_VERSIONS = frozenset([72, 73, 74, CURRENT_API_VERSION])
+CURRENT_API_VERSION = 76
+COMPATIBLE_API_VERSIONS = frozenset([72, 73, 74, 75, CURRENT_API_VERSION])
 CURRENT_P5I_VERSION = 1
 
 # Image type constants.
@@ -257,6 +257,10 @@
         FACET_IMAGE = 1
         FACET_INSTALLED = 2
 
+        FACET_SRC_SYSTEM = pkg.facet.Facets.FACET_SRC_SYSTEM
+        FACET_SRC_LOCAL = pkg.facet.Facets.FACET_SRC_LOCAL
+        FACET_SRC_PARENT = pkg.facet.Facets.FACET_SRC_PARENT
+
         # Constants used to reference specific values that info can return.
         INFO_FOUND = 0
         INFO_MISSING = 1
@@ -834,6 +838,8 @@
                     (
                         name,    - (string) facet name (e.g. facet.doc)
                         value    - (boolean) current facet value
+                        src      - (string) source for the value
+                        masked   - (boolean) is the facet masked by another
                     )
 
                 Results are always sorted by facet name.
@@ -881,10 +887,22 @@
                 # Generate the results.
                 for name in misc.yield_matching("facet.", sorted(fimg | fpkg),
                     patterns):
-                        # The image's Facets dictionary will return the
-                        # effective value for any facets not explicitly set in
-                        # the image (wildcards or implicit).
-                        yield (name, facets[name])
+                        # check if the facet is explicitly set.
+                        if name not in facets:
+                                # The image's Facets dictionary will return
+                                # the effective value for any facets not
+                                # explicitly set in the image (wildcards or
+                                # implicit). _match_src() will tell us how
+                                # that effective value was determined (via a
+                                # local or inherited wildcard facet, or via a
+                                # system default).
+                                src = facets._match_src(name)
+                                yield (name, facets[name], src, False)
+                                continue
+
+                        # This is an explicitly set facet.
+                        for value, src, masked in facets._src_values(name):
+                                yield (name, value, src, masked)
 
         def gen_variants(self, variant_list, patterns=misc.EmptyI):
                 """A generator function that produces tuples of the form:
@@ -1350,7 +1368,7 @@
 
                         if _li_parent_sync:
                                 # refresh linked image data from parent image.
-                                self._img.linked.syncmd_from_parent(api_op=_op)
+                                self._img.linked.syncmd_from_parent()
 
                         # initialize recursion state
                         self._img.linked.api_recurse_init(
@@ -1860,7 +1878,8 @@
 
         def gen_plan_detach(self, backup_be=None,
             backup_be_name=None, be_activate=True, be_name=None, force=False,
-            li_ignore=None, new_be=False, noexecute=False):
+            li_ignore=None, li_md_only=False, li_pkg_updates=True, new_be=False,
+            noexecute=False):
                 """This is a generator function that yields a PlanDescription
                 object.  If parsable_version is set, it also yields dictionaries
                 containing plan information for child images.
@@ -1885,9 +1904,10 @@
                 return self.__plan_op(op, _ad_kwargs=ad_kwargs,
                     _backup_be=backup_be, _backup_be_name=backup_be_name,
                     _be_activate=be_activate, _be_name=be_name,
-                    _li_ignore=li_ignore, _new_be=new_be,
-                    _noexecute=noexecute, _refresh_catalogs=False,
-                    _update_index=False, li_pkg_updates=False)
+                    _li_ignore=li_ignore, _li_md_only=li_md_only,
+                    _new_be=new_be, _noexecute=noexecute,
+                    _refresh_catalogs=False, _update_index=False,
+                    li_pkg_updates=li_pkg_updates)
 
         def plan_uninstall(self, pkg_list, noexecute=False, update_index=True,
             be_name=None, new_be=False, be_activate=True):
@@ -1900,8 +1920,8 @@
 
         def gen_plan_uninstall(self, pkgs_to_uninstall,
             backup_be=None, backup_be_name=None, be_activate=True,
-            be_name=None, li_ignore=None, new_be=False, noexecute=False,
-            update_index=True):
+            be_name=None, li_ignore=None, li_parent_sync=True, new_be=False,
+            noexecute=False, update_index=True):
                 """This is a generator function that yields a PlanDescription
                 object.  If parsable_version is set, it also yields dictionaries
                 containing plan information for child images.
@@ -1927,7 +1947,7 @@
                 return self.__plan_op(op,
                     _backup_be=backup_be, _backup_be_name=backup_be_name,
                     _be_activate=be_activate, _be_name=be_name,
-                    _li_ignore=li_ignore, _li_parent_sync=False,
+                    _li_ignore=li_ignore, _li_parent_sync=li_parent_sync,
                     _new_be=new_be, _noexecute=noexecute,
                     _refresh_catalogs=False,
                     _update_index=update_index,
@@ -2128,7 +2148,8 @@
                     refresh_catalogs=refresh_catalogs, reject_list=reject_list,
                     show_licenses=show_licenses, update_index=update_index)
 
-        def detach_linked_children(self, li_list, force=False, noexecute=False):
+        def detach_linked_children(self, li_list, force=False,
+            li_md_only=False, li_pkg_updates=True, noexecute=False):
                 """Detach one or more children from the current image. This
                 operation results in the removal of any constraint package
                 from the child images.
@@ -2149,7 +2170,9 @@
                 error."""
 
                 return self._img.linked.detach_children(li_list,
-                    force=force, noexecute=noexecute)
+                    force=force, li_md_only=li_md_only,
+                    li_pkg_updates=li_pkg_updates,
+                    noexecute=noexecute)
 
         def detach_linked_rvdict2rv(self, rvdict):
                 """Convenience function that takes a dictionary returned from
@@ -2216,9 +2239,15 @@
 
                 lin = self._img.linked.child_name
                 rvdict = {}
-                ret = self._img.linked.audit_self(
-                    li_parent_sync=li_parent_sync)
-                rvdict[lin] = ret
+
+                if li_parent_sync:
+                        # refresh linked image data from parent image.
+                        rvdict[lin] = self._img.linked.syncmd_from_parent(
+                            catch_exception=True)
+                        if rvdict[lin] is not None:
+                                return rvdict
+
+                rvdict[lin] = self._img.linked.audit_self()
                 return rvdict
 
         def ischild(self):
--- a/src/modules/client/image.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/modules/client/image.py	Wed Aug 07 15:30:45 2013 -0700
@@ -3553,17 +3553,7 @@
                         for act in m.gen_actions(excludes):
                                 if not act.globally_identical:
                                         continue
-                                for key in act.attrs.keys():
-                                        if (act.unique_attrs and
-                                            key not in act.unique_attrs and
-                                            not (act.name == "file" and
-                                                key == "overlay") and
-                                            not ((act.name == "link" or
-                                                  act.name == "hardlink") and
-                                                 key.startswith("mediator"))) or \
-                                            key.startswith("variant.") or \
-                                            key.startswith("facet."):
-                                                del act.attrs[key]
+                                act.strip()
                                 heappush(heap, (act.name,
                                     act.attrs[act.key_attr], pfmri, act))
                                 nsd.setdefault(act.namespace_group, {})
--- a/src/modules/client/imageconfig.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/modules/client/imageconfig.py	Wed Aug 07 15:30:45 2013 -0700
@@ -183,6 +183,9 @@
                 cfg.PropertySection("facet", properties=[
                     cfg.PropertyTemplate("^facet\..*", prop_type=cfg.PropBool),
                 ]),
+                cfg.PropertySection("inherited_facet", properties=[
+                    cfg.PropertyTemplate("^facet\..*", prop_type=cfg.PropBool),
+                ]),
                 cfg.PropertySection("mediators", properties=[
                     cfg.PropertyTemplate("^[A-Za-z0-9\-]+\.implementation$"),
                     cfg.PropertyTemplate("^[A-Za-z0-9\-]+\.implementation-version$",
@@ -400,6 +403,9 @@
                 for k, v in idx.get("facet", {}).iteritems():
                         # convert facet name from unicode to a string
                         self.facets[str(urllib.unquote(k))] = v
+                for k, v in idx.get("inherited_facet", {}).iteritems():
+                        # convert facet name from unicode to a string
+                        self.facets._set_inherited(str(urllib.unquote(k)), v)
 
                 # Ensure architecture and zone variants are defined.
                 if "variant.arch" not in self.variants:
@@ -537,9 +543,19 @@
                         self.remove_section("facet")
                 except cfg.UnknownSectionError:
                         pass
-                for f in self.facets:
-                        self.set_property("facet", urllib.quote(f, ""), 
-                            self.facets[f])
+                # save local facets
+                for f in self.facets.local:
+                        self.set_property("facet",
+                            urllib.quote(f, ""), self.facets.local[f])
+
+                try:
+                        self.remove_section("inherited_facet")
+                except cfg.UnknownSectionError:
+                        pass
+                # save inherited facets
+                for f in self.facets.inherited:
+                        self.set_property("inherited_facet",
+                            urllib.quote(f, ""), self.facets.inherited[f])
 
                 try:
                         self.remove_section("mediators")
--- a/src/modules/client/imageplan.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/modules/client/imageplan.py	Wed Aug 07 15:30:45 2013 -0700
@@ -262,11 +262,9 @@
                                         # packages, add it to the list.
                                         fmri_updates.append((a, a))
 
-                if fmri_updates and not li_pkg_updates:
-                        # oops.  the caller requested no package updates and
-                        # we couldn't satisfy that request.
-                        raise api_errors.PlanCreationException(
-                            pkg_updates_required=fmri_updates)
+                # cache li_pkg_updates in the plan description for later
+                # evaluation
+                self.pd._li_pkg_updates = li_pkg_updates
 
                 return fmri_updates
 
@@ -276,32 +274,106 @@
 
                 self.pd._image_lm = self.image.get_last_modified(string=True)
 
-        def __evaluate_varcets(self, new_variants, new_facets):
+        def __merge_inherited_facets(self, new_facets=None):
+                """Merge any new facet settings with (possibly changing)
+                inherited facets."""
+
+                if new_facets is not None:
+                        # make sure we don't accidentally update the caller
+                        # supplied facets.
+                        new_facets = pkg.facet.Facets(new_facets)
+
+                        # we don't allow callers to specify inherited facets
+                        # (they can only come from parent images.)
+                        new_facets._clear_inherited()
+
+                # get the existing image facets.
+                old_facets = self.image.cfg.facets
+
+                if new_facets is None:
+                        # the user did not request any facet changes, but we
+                        # still need to see if inherited facets are changing.
+                        # so set new_facets to the existing facet set with
+                        # inherited facets removed.
+                        new_facets = pkg.facet.Facets(old_facets)
+                        new_facets._clear_inherited()
+
+                # get the latest inherited facets and merge them into the user
+                # specified facets.
+                new_facets.update(self.image.linked.inherited_facets())
+
+                if new_facets == old_facets:
+                        # there are no caller specified or inherited facet
+                        # changes.
+                        return (None, False, False)
+
+                facet_change = bool(old_facets._cmp_values(new_facets)) or \
+                    bool(old_facets._cmp_priority(new_facets))
+                masked_facet_change = bool(not facet_change) and \
+                    bool(old_facets._cmp_all_values(new_facets))
+
+                # Something better be changing.  But if visible facets are
+                # changing we don't report masked facet changes.
+                assert facet_change != masked_facet_change
+
+                return (new_facets, facet_change, masked_facet_change)
+
+        def __evaluate_varcets(self, new_variants=None, new_facets=None):
                 """Private helper function used to determine new facet and
                 variant state for image."""
 
-                old_facets = self.image.cfg.facets
-                if new_variants or \
-                    (new_facets is not None and new_facets != old_facets):
+                # merge caller supplied and inherited facets
+                new_facets, facet_change, masked_facet_change = \
+                    self.__merge_inherited_facets(new_facets)
+
+                # if we're changing variants or facets, save that to the plan.
+                if new_variants or facet_change or masked_facet_change:
                         self.pd._varcets_change = True
                         self.pd._new_variants = new_variants
+                        self.pd._old_facets   = self.image.cfg.facets
                         self.pd._new_facets   = new_facets
-                        tmp_new_facets = new_facets
-                        if tmp_new_facets is None:
-                                tmp_new_facets = pkg.facet.Facets()
-                        self.pd._changed_facets = pkg.facet.Facets(dict(
-                            set(tmp_new_facets.iteritems()) -
-                            set(old_facets.iteritems())))
-                        self.pd._removed_facets = set(old_facets.keys()) - \
-                            set(tmp_new_facets.keys())
-
-                if new_facets == old_facets:
-                        new_facets = None
+                        self.pd._facet_change = facet_change
+                        self.pd._masked_facet_change = masked_facet_change
 
                 self.__new_excludes = self.image.list_excludes(new_variants,
                     new_facets)
 
-                return new_variants, new_facets
+                return (new_variants, new_facets, facet_change,
+                    masked_facet_change)
+
+        def __run_solver(self, solver_cb, retry_wo_parent_deps=True):
+                """Run the solver, and if it fails, optionally retry the
+                operation once while relaxing installed parent
+                dependencies."""
+
+                # have the solver try to satisfy parent dependencies.
+                ignore_inst_parent_deps = False
+
+                try:
+                        return solver_cb(ignore_inst_parent_deps)
+                except api_errors.PlanCreationException, e:
+                        # if we're currently in sync don't retry the
+                        # operation
+                        if self.image.linked.insync(latest_md=False):
+                                raise e
+                        # if PKG_REQUIRE_SYNC is set in the
+                        # environment we require an in-sync image.
+                        if "PKG_REQUIRE_SYNC" in os.environ:
+                                raise e
+                        # caller doesn't want us to retry
+                        if not retry_wo_parent_deps:
+                                raise e
+                        # we're an out-of-sync child image so retry
+                        # this operation while ignoring parent
+                        # dependencies for any installed packages.  we
+                        # do this so that users can manipulate out of
+                        # sync images in an attempt to bring them back
+                        # in sync.  since we don't ignore parent
+                        # dependencies for uninstalled packages, the
+                        # user won't be able to take the image further
+                        # out of sync.
+                        ignore_inst_parent_deps = True
+                        return solver_cb(ignore_inst_parent_deps)
 
         def __plan_install_solver(self, li_pkg_updates=True, li_sync_op=False,
             new_facets=None, new_variants=None, pkgs_inst=None,
@@ -310,6 +382,21 @@
                 install the specified pkgs, sync the specified image, and/or
                 change facets/variants within the current image."""
 
+                # evaluate what varcet changes are required
+                new_variants, new_facets, \
+                    facet_change, masked_facet_change = \
+                    self.__evaluate_varcets(new_variants, new_facets)
+
+                # check if we need to uninstall any packages.
+                uninstall = self.__any_reject_matches(reject_list)
+
+                # check if anything is actually changing.
+                if not (li_sync_op or pkgs_inst or uninstall or
+                    new_variants or facet_change or fmri_changes is not None):
+                        # the solver is not necessary.
+                        self.pd._fmri_changes = []
+                        return
+
                 # get ranking of publishers
                 pub_ranks = self.image.get_publisher_ranks()
 
@@ -337,27 +424,40 @@
                 else:
                         variants = self.image.get_variants()
 
-                # instantiate solver
-                solver = pkg_solver.PkgSolver(
-                    self.image.get_catalog(self.image.IMG_CATALOG_KNOWN),
-                    installed_dict,
-                    pub_ranks,
-                    variants,
-                    self.image.avoid_set_get(),
-                    self.image.linked.parent_fmris(),
-                    self.__progtrack)
-
-                # If this isn't a sync-linked operation then set
-                # ignore_inst_parent_deps to True to allow us to modify
-                # out of sync images.
-                ignore_inst_parent_deps = not li_sync_op
-
-                # Solve... will raise exceptions if no solution is found
-                new_vector, self.pd._new_avoid_obs = solver.solve_install(
-                        self.image.get_frozen_list(), inst_dict,
-                        new_variants=new_variants, excludes=self.__new_excludes,
-                        reject_set=reject_set, relax_all=li_sync_op,
-                        ignore_inst_parent_deps=ignore_inst_parent_deps)
+                def solver_cb(ignore_inst_parent_deps):
+                        # instantiate solver
+                        solver = pkg_solver.PkgSolver(
+                            self.image.get_catalog(
+                                self.image.IMG_CATALOG_KNOWN),
+                            installed_dict,
+                            pub_ranks,
+                            variants,
+                            self.image.avoid_set_get(),
+                            self.image.linked.parent_fmris(),
+                            self.__progtrack)
+
+                        # run solver
+                        new_vector, new_avoid_obs = \
+                            solver.solve_install(
+                                self.image.get_frozen_list(),
+                                inst_dict,
+                                new_variants=new_variants,
+                                excludes=self.__new_excludes,
+                                reject_set=reject_set,
+                                relax_all=li_sync_op,
+                                ignore_inst_parent_deps=\
+                                    ignore_inst_parent_deps)
+
+                        return solver, new_vector, new_avoid_obs
+
+                # We can't retry this operation while ignoring parent
+                # dependencies if we're doing a linked image sync.
+                retry_wo_parent_deps = not li_sync_op
+
+                # Solve; will raise exceptions if no solution is found.
+                solver, new_vector, self.pd._new_avoid_obs = \
+                    self.__run_solver(solver_cb,
+                        retry_wo_parent_deps=retry_wo_parent_deps)
 
                 self.pd._fmri_changes = self.__vector_2_fmri_changes(
                     installed_dict, new_vector,
@@ -377,20 +477,6 @@
                 current image."""
 
                 self.__plan_op()
-
-                new_variants, new_facets = self.__evaluate_varcets(new_variants,
-                    new_facets)
-
-                if not (new_variants or pkgs_inst or li_sync_op or
-                    new_facets is not None):
-                        # nothing to do
-                        self.pd._fmri_changes = []
-                        self.pd.state = plandesc.EVALUATED_PKGS
-                        return
-
-                # If we ever actually support changing facets and variants at
-                # the same time as performing an install, the optimizations done
-                # for plan_change_varacets should be applied here (shared).
                 self.__plan_install_solver(
                     li_pkg_updates=li_pkg_updates,
                     li_sync_op=li_sync_op,
@@ -478,88 +564,134 @@
 
                 return use_solver, fmri_changes
 
+        def __facet_change_fastpath(self):
+                """The following optimizations only work correctly if only
+                facets are changing (not variants, uninstalls, etc)."""
+
+                old_facets = self.pd._old_facets
+                new_facets = self.pd._new_facets
+
+                changed_facets = [
+                        f
+                        for f in new_facets
+                        if f not in old_facets or \
+                            old_facets[f] != new_facets[f]
+                ]
+
+                def get_fattrs(m, use_solver):
+                        # Get the list of facets involved in this
+                        # operation that the package uses.  To
+                        # accurately determine which packages are
+                        # actually being changed, we must compare the
+                        # old effective value for each facet that is
+                        # changing with its new effective value.
+                        return use_solver, list(
+                            f
+                            for f in m.gen_facets(
+                                excludes=self.__new_excludes,
+                                patterns=changed_facets)
+                            if new_facets[f] != old_facets[f]
+                        )
+
+                return self.__get_attr_fmri_changes(get_fattrs)
+
+        def __variant_change_fastpath(self):
+                """The following optimizations only work correctly if only
+                variants are changing (not facets, uninstalls, etc)."""
+
+                nvariants = self.pd._new_variants
+
+                def get_vattrs(m, use_solver):
+                        # Get the list of variants involved in this
+                        # operation that the package uses.
+                        mvars = []
+                        for (variant, pvals) in m.gen_variants(
+                            excludes=self.__new_excludes,
+                            patterns=nvariants
+                        ):
+                                if nvariants[variant] not in pvals:
+                                        # If the new value for the
+                                        # variant is unsupported by this
+                                        # package, then the solver
+                                        # should be triggered so the
+                                        # package can be removed.
+                                        use_solver = True
+                                mvars.append(variant)
+                        return use_solver, mvars
+
+                return self.__get_attr_fmri_changes(get_vattrs)
+
         def plan_change_varcets(self, new_facets=None, new_variants=None,
             reject_list=misc.EmptyI):
                 """Determine the fmri changes needed to change the specified
                 facets/variants."""
 
                 self.__plan_op()
-                new_variants, new_facets = self.__evaluate_varcets(new_variants,
-                    new_facets)
-
-                if not new_variants and new_facets is None:
-                        # nothing to do
-                        self.pd._fmri_changes = []
+
+                # assume none of our optimizations will work.
+                fmri_changes = None
+
+                # convenience function to invoke the solver.
+                def plan_install_solver():
+                        self.__plan_install_solver(
+                            new_facets=new_facets,
+                            new_variants=new_variants,
+                            reject_list=reject_list,
+                            fmri_changes=fmri_changes)
                         self.pd.state = plandesc.EVALUATED_PKGS
+
+                # evaluate what varcet changes are required
+                new_variants, new_facets, \
+                    facet_change, masked_facet_change = \
+                    self.__evaluate_varcets(new_variants, new_facets)
+
+                # uninstalling packages requires the solver.
+                uninstall = self.__any_reject_matches(reject_list)
+                if uninstall:
+                        plan_install_solver()
+                        return
+
+                # All operations (including varcet changes) need to try and
+                # keep linked images in sync.  Linked image audits are fast,
+                # so do one now and if we're not in sync we need to invoke the
+                # solver.
+                if not self.image.linked.insync():
+                        plan_install_solver()
+                        return
+
+                # if facets and variants are changing at the same time, then
+                # we need to invoke the solver.
+                if new_variants and facet_change:
+                        plan_install_solver()
                         return
 
                 # By default, we assume the solver must be used.  If any of the
                 # optimizations below can be applied, they'll determine whether
                 # the solver can be used.
                 use_solver = True
-                fmri_changes = None
-
-                # The following use_solver, fmri_changes checks are only known
-                # to work correctly if only facets or only variants are
-                # changing; not both.  Changing both is not currently supported
-                # anyway so this shouldn't be a problem.
-                if new_facets is not None and not new_variants:
-                        old_facets = self.image.cfg.facets
-
-                        def get_fattrs(m, use_solver):
-                                # Get the list of facets involved in this
-                                # operation that the package uses.  To
-                                # accurately determine which packages are
-                                # actually being changed, we must compare the
-                                # old effective value for each facet that is
-                                # changing with its new effective value.
-                                return use_solver, list(
-                                    f
-                                    for f in m.gen_facets(
-                                        excludes=self.__new_excludes,
-                                        patterns=self.pd._changed_facets)
-                                    if new_facets[f] != old_facets[f]
-                                )
-
+
+                # the following facet optimization only works if we're not
+                # changing variants at the same time.
+                if facet_change:
+                        assert not new_variants
                         use_solver, fmri_changes = \
-                            self.__get_attr_fmri_changes(get_fattrs)
-
-                if new_variants and new_facets is None:
-                        nvariants = self.pd._new_variants
-
-                        def get_vattrs(m, use_solver):
-                                # Get the list of variants involved in this
-                                # operation that the package uses.
-                                mvars = []
-                                for (variant, pvals) in m.gen_variants(
-                                    excludes=self.__new_excludes,
-                                    patterns=nvariants
-                                ):
-                                        if nvariants[variant] not in pvals:
-                                                # If the new value for the
-                                                # variant is unsupported by this
-                                                # package, then the solver
-                                                # should be triggered so the
-                                                # package can be removed.
-                                                use_solver = True
-                                        mvars.append(variant)
-                                return use_solver, mvars
-
+                            self.__facet_change_fastpath()
+
+                # the following variant optimization only works if we're not
+                # changing facets at the same time.
+                if new_variants:
+                        assert not facet_change
                         use_solver, fmri_changes = \
-                            self.__get_attr_fmri_changes(get_vattrs)
+                            self.__variant_change_fastpath()
 
                 if use_solver:
-                        self.__plan_install_solver(
-                            fmri_changes=fmri_changes,
-                            new_facets=new_facets,
-                            new_variants=new_variants,
-                            reject_list=reject_list)
-                else:
-                        # If solver isn't involved, assume the list of packages
-                        # has been determined.
-                        self.pd._fmri_changes = fmri_changes and \
-                            fmri_changes or []
-
+                        plan_install_solver()
+                        return
+
+                # If solver isn't involved, assume the list of packages
+                # has been determined.
+                assert fmri_changes is not None
+                self.pd._fmri_changes = fmri_changes
                 self.pd.state = plandesc.EVALUATED_PKGS
 
         def plan_set_mediators(self, new_mediators):
@@ -700,27 +832,36 @@
                 pt.plan_done(pt.PLAN_MEDIATION_CHG)
                 self.pd.state = plandesc.EVALUATED_PKGS
 
+        def __any_reject_matches(self, reject_list):
+                """Check if any reject patterns match installed packages (in
+                which case a packaging operation should attempt to uninstall
+                those packages)."""
+
+                # return true if any packages in reject list
+                # match any installed packages
+                return bool(reject_list) and \
+                    bool(self.match_user_stems(self.image, reject_list,
+                        self.MATCH_INST_VERSIONS, raise_not_installed=False))
+
         def plan_sync(self, li_pkg_updates=True, reject_list=misc.EmptyI):
                 """Determine the fmri changes needed to sync the image."""
 
-                # check if the sync will try to uninstall packages.
-                uninstall = False
-                reject_set = self.match_user_stems(self.image, reject_list,
-                    self.MATCH_INST_VERSIONS, raise_not_installed=False)
-                if reject_set:
-                        # at least one reject pattern matched an installed
-                        # package
-                        uninstall = True
+                self.__plan_op()
+
+                # check if we need to uninstall any packages.
+                uninstall = self.__any_reject_matches(reject_list)
+
+                # check if inherited facets are changing
+                new_facets = self.__evaluate_varcets()[1]
 
                 # audits are fast, so do an audit to check if we're in sync.
-                rv, err, p_dict = self.image.linked.audit_self(
-                    li_parent_sync=False)
-
-                # if we're not trying to uninstall packages and we're
-                # already in sync then don't bother invoking the solver.
-                if not uninstall and rv == pkgdefs.EXIT_OK:
+                insync = self.image.linked.insync()
+
+                # if we're not trying to uninstall packages, and inherited
+                # facets are not changing, and we're already in sync, then
+                # don't bother invoking the solver.
+                if not uninstall and new_facets is None and insync:
                         # we don't need to do anything
-                        self.__plan_op()
                         self.pd._fmri_changes = []
                         self.pd.state = plandesc.EVALUATED_PKGS
                         return
@@ -739,33 +880,43 @@
                     for f in each
                 ])
 
+                # check if inherited facets are changing
+                new_facets = self.__evaluate_varcets()[1]
+
                 # build installed dict
                 installed_dict = ImagePlan.__fmris2dict(
                     self.image.gen_installed_pkgs())
 
-                # instantiate solver
-                solver = pkg_solver.PkgSolver(
-                    self.image.get_catalog(self.image.IMG_CATALOG_KNOWN),
-                    installed_dict,
-                    self.image.get_publisher_ranks(),
-                    self.image.get_variants(),
-                    self.image.avoid_set_get(),
-                    self.image.linked.parent_fmris(),
-                    self.__progtrack)
-
-                # Set ignore_inst_parent_deps to True to allow us to
-                # modify out of sync images.
-                new_vector, self.pd._new_avoid_obs = solver.solve_uninstall(
-                    self.image.get_frozen_list(), proposed_removals,
-                    self.__new_excludes,
-                    ignore_inst_parent_deps=True)
-
-                self.pd._fmri_changes = [
-                    (a, b)
-                    for a, b in ImagePlan.__dicts2fmrichanges(installed_dict,
-                        ImagePlan.__fmris2dict(new_vector))
-                    if a != b
-                ]
+                def solver_cb(ignore_inst_parent_deps):
+                        # instantiate solver
+                        solver = pkg_solver.PkgSolver(
+                            self.image.get_catalog(
+                                self.image.IMG_CATALOG_KNOWN),
+                            installed_dict,
+                            self.image.get_publisher_ranks(),
+                            self.image.get_variants(),
+                            self.image.avoid_set_get(),
+                            self.image.linked.parent_fmris(),
+                            self.__progtrack)
+
+                        # run solver
+                        new_vector, new_avoid_obs = \
+                            solver.solve_uninstall(
+                                self.image.get_frozen_list(),
+                                proposed_removals,
+                                self.__new_excludes,
+                                ignore_inst_parent_deps=\
+                                    ignore_inst_parent_deps)
+
+                        return solver, new_vector, new_avoid_obs
+
+                # Solve; will raise exceptions if no solution is found.
+                solver, new_vector, self.pd._new_avoid_obs = \
+                    self.__run_solver(solver_cb)
+
+                self.pd._fmri_changes = self.__vector_2_fmri_changes(
+                    installed_dict, new_vector,
+                    new_facets=new_facets)
 
                 self.pd._solver_summary = str(solver)
                 if DebugValues["plan"]:
@@ -778,6 +929,10 @@
                 """Use the solver to determine the fmri changes needed to
                 update the specified pkgs or all packages if none were
                 specified."""
+
+                # check if inherited facets are changing
+                new_facets = self.__evaluate_varcets()[1]
+
                 # get ranking of publishers
                 pub_ranks = self.image.get_publisher_ranks()
 
@@ -800,37 +955,52 @@
                             reject_set=reject_set)
                         self.__match_update = references
 
-                # instantiate solver
-                solver = pkg_solver.PkgSolver(
-                    self.image.get_catalog(self.image.IMG_CATALOG_KNOWN),
-                    installed_dict,
-                    pub_ranks,
-                    self.image.get_variants(),
-                    self.image.avoid_set_get(),
-                    self.image.linked.parent_fmris(),
-                    self.__progtrack)
-
-                if pkgs_update:
-                        # Set ignore_inst_parent_deps to True to allow us to
-                        # modify out of sync images.
-                        new_vector, self.pd._new_avoid_obs = \
-                            solver.solve_install(
-                                self.image.get_frozen_list(),
-                                update_dict, excludes=self.__new_excludes,
-                                reject_set=reject_set,
-                                trim_proposed_installed=False,
-                                ignore_inst_parent_deps=True)
-                else:
-                        # Updating all installed packages requires a different
-                        # solution path.
-                        new_vector, self.pd._new_avoid_obs = \
-                            solver.solve_update_all(
-                                self.image.get_frozen_list(),
-                                excludes=self.__new_excludes,
-                                reject_set=reject_set)
+                def solver_cb(ignore_inst_parent_deps):
+                        # instantiate solver
+                        solver = pkg_solver.PkgSolver(
+                            self.image.get_catalog(
+                                self.image.IMG_CATALOG_KNOWN),
+                            installed_dict,
+                            pub_ranks,
+                            self.image.get_variants(),
+                            self.image.avoid_set_get(),
+                            self.image.linked.parent_fmris(),
+                            self.__progtrack)
+
+                        # run solver
+                        if pkgs_update:
+                                new_vector, new_avoid_obs = \
+                                    solver.solve_install(
+                                        self.image.get_frozen_list(),
+                                        update_dict,
+                                        excludes=self.__new_excludes,
+                                        reject_set=reject_set,
+                                        trim_proposed_installed=False,
+                                        ignore_inst_parent_deps=\
+                                            ignore_inst_parent_deps)
+                        else:
+                                # Updating all installed packages requires a
+                                # different solution path.
+                                new_vector, new_avoid_obs = \
+                                    solver.solve_update_all(
+                                        self.image.get_frozen_list(),
+                                        excludes=self.__new_excludes,
+                                        reject_set=reject_set)
+
+                        return solver, new_vector, new_avoid_obs
+
+                # We can't retry this operation while ignoring parent
+                # dependencies if we're doing an unconstrained update.
+                retry_wo_parent_deps = bool(pkgs_update)
+
+                # Solve; will raise exceptions if no solution is found.
+                solver, new_vector, self.pd._new_avoid_obs = \
+                    self.__run_solver(solver_cb,
+                        retry_wo_parent_deps=retry_wo_parent_deps)
 
                 self.pd._fmri_changes = self.__vector_2_fmri_changes(
-                    installed_dict, new_vector)
+                    installed_dict, new_vector,
+                    new_facets=new_facets)
 
                 self.pd._solver_summary = str(solver)
                 if DebugValues["plan"]:
@@ -1702,11 +1872,53 @@
                                     []).append((a, pfmri))
                 return d
 
-        def __update_old(self, new, old, offset_dict, action_classes, sf,
-            gone_fmris, fmri_dict):
-                """Update the 'old' dictionary with all actions from the action
-                cache which could conflict with the newly actions delivered
-                actions contained in 'new.'
+        @staticmethod
+        def __act_dup_check(tgt, key, actstr, fmristr):
+                """Check for duplicate actions/fmri tuples in 'tgt', which is
+                indexed by 'key'."""
+
+                #
+                # When checking for duplicate actions we have to account for
+                # the fact that actions which are part of a package plan are
+                # not stripped.  But the actions we're iterating over here are
+                # coming from the stripped action cache, so they have had
+                # assorted attributes removed (like variants, facets, etc.) So
+                # to check for duplicates we have to make sure to strip the
+                # actions we're comparing against.  Of course we can't just
+                # strip the actions which are part of a package plan because
+                # we could be removing data critical to the execution of that
+                # action like original_name, etc.  So before we strip an
+                # action we have to make a copy of it.
+                #
+                # If we're dealing with a directory action and an "implicit"
+                # attribute exists, we need to preserve it.  We assume it's a
+                # synthetic attribute that indicates that the action was
+                # created implicitly (and hence won't conflict with an
+                # explicit directory action defining the same directory).
+                # Note that we've assumed that no one will ever add an
+                # explicit "implicit" attribute to a directory action.
+                #
+                preserve = {"dir": ["implicit"]}
+                if key not in tgt:
+                        return False
+                for act, pfmri in tgt[key]:
+                        # check the fmri first since that's easy
+                        if fmristr != str(pfmri):
+                                continue
+                        act = pkg.actions.fromstr(str(act))
+                        act.strip(preserve=preserve)
+                        if actstr == str(act):
+                                return True
+                return False
+
+        def __update_act(self, keys, tgt, skip_dups, offset_dict,
+            action_classes, sf, skip_fmris, fmri_dict):
+                """Update 'tgt' with action/fmri pairs from the stripped
+                action cache that are associated with the specified action
+                'keys'.
+
+                The 'skip_dups' parameter indicates if we should avoid adding
+                duplicate action/pfmri pairs into 'tgt'.
 
                 The 'offset_dict' parameter contains a mapping from key to
                 offsets into the actions.stripped file and the number of lines
@@ -1719,9 +1931,9 @@
                 read the actual actions indicated by the offset dictionary
                 'offset_dict.'
 
-                The 'gone_fmris' parameter contains a set of strings
-                representing the packages which are being removed from the
-                system.
+                The 'skip_fmris' parameter contains a set of strings
+                representing the packages which we should not process actions
+                for.
 
                 The 'fmri_dict' parameter is a cache of previously built PkgFmri
                 objects which is used so the same string isn't translated into
@@ -1729,7 +1941,7 @@
 
                 build_release = self.image.attrs["Build-Release"]
 
-                for key in set(itertools.chain(new.iterkeys(), old.keys())):
+                for key in keys:
                         offsets = []
                         for klass in action_classes:
                                 offset = offset_dict.get((klass.name, key),
@@ -1750,86 +1962,7 @@
                                         if line == "":
                                                 break
                                         fmristr, actstr = line.split(None, 1)
-                                        if fmristr in gone_fmris:
-                                                continue
-                                        act = pkg.actions.fromstr(actstr)
-                                        assert act.attrs[act.key_attr] == key
-                                        assert pns is None or \
-                                            act.namespace_group == pns
-                                        pns = act.namespace_group
-
-                                        try:
-                                                pfmri = fmri_dict[fmristr]
-                                        except KeyError:
-                                                pfmri = pkg.fmri.PkgFmri(
-                                                    fmristr,
-                                                    build_release)
-                                                fmri_dict[fmristr] = pfmri
-                                        old.setdefault(key, []).append(
-                                            (act, pfmri))
-
-        def __update_new(self, new, old, offset_dict, action_classes, sf,
-            gone_fmris, fmri_dict):
-                """Update 'new' with all the actions from the action cache which
-                are staying on the system and could conflict with the newly
-                installed actions.
-
-                The 'offset_dict' parameter contains a mapping from key to
-                offsets into the actions.stripped file and the number of lines
-                to read.
-
-                The 'action_classes' parameter contains the list of action types
-                where one action can conflict with another action.
-
-                The 'sf' parameter is the actions.stripped file from which we
-                read the actual actions indicated by the offset dictionary
-                'offset_dict.'
-
-                The 'gone_fmris' parameter contains a set of strings
-                representing the packages which are being removed from the
-                system.
-
-                The 'fmri_dict' parameter is a cache of previously built PkgFmri
-                objects which is used so the same string isn't translated into
-                the same PkgFmri object multiple times."""
-
-                # If we're not changing any fmris, as in the case of a
-                # change-facet/variant, revert, fix, or set-mediator, then we
-                # need to skip modifying new, as it'll just end up with
-                # incorrect duplicates.
-                if self.planned_op in (
-                    pkgdefs.API_OP_CHANGE_FACET,
-                    pkgdefs.API_OP_CHANGE_VARIANT,
-                    pkgdefs.API_OP_REPAIR,
-                    pkgdefs.API_OP_REVERT,
-                    pkgdefs.API_OP_SET_MEDIATOR):
-                        return
-
-                build_release = self.image.attrs["Build-Release"]
-
-                for key in old.iterkeys():
-                        offsets = []
-                        for klass in action_classes:
-                                offset = offset_dict.get((klass.name, key),
-                                    None)
-                                if offset is not None:
-                                        offsets.append(offset)
-
-                        for offset, cnt in offsets:
-                                sf.seek(offset)
-                                pns = None
-                                i = 0
-                                while 1:
-                                        line = sf.readline()
-                                        i += 1
-                                        if i > cnt:
-                                                break
-                                        line = line.rstrip()
-                                        if line == "":
-                                                break
-                                        fmristr, actstr = line.rstrip().split(
-                                            None, 1)
-                                        if fmristr in gone_fmris:
+                                        if fmristr in skip_fmris:
                                                 continue
                                         act = pkg.actions.fromstr(actstr)
                                         assert act.attrs[act.key_attr] == key
@@ -1843,7 +1976,10 @@
                                                 pfmri = pkg.fmri.PkgFmri(
                                                     fmristr, build_release)
                                                 fmri_dict[fmristr] = pfmri
-                                        new.setdefault(key, []).append(
+                                        if skip_dups and self.__act_dup_check(
+                                            tgt, key, actstr, fmristr):
+                                                continue
+                                        tgt.setdefault(key, []).append(
                                             (act, pfmri))
 
         def __fast_check(self, new, old, ns):
@@ -2113,11 +2249,20 @@
                         self.__clear_pkg_plans()
                         return
 
+                # figure out which installed packages are being removed by
+                # this operation
                 old_fmris = set((
                     str(s) for s in self.image.gen_installed_pkgs()
                 ))
                 gone_fmris = old_fmris - new_fmris
 
+                # figure out which new packages are being touched by this
+                # operation.
+                changing_fmris = set([
+                        str(p.destination_fmri)
+                        for p in self.pd.pkg_plans
+                        if p.destination_fmri
+                ])
 
                 # Group action types by namespace groups
                 kf = operator.attrgetter("namespace_group")
@@ -2173,15 +2318,20 @@
                                 # cache which could conflict with the new
                                 # actions being installed, or with actions
                                 # already installed, but not getting removed.
-                                self.__update_old(new, old, offset_dict,
-                                    action_classes, msf, gone_fmris, fmri_dict)
+                                keys = set(itertools.chain(new.iterkeys(),
+                                    old.iterkeys()))
+                                self.__update_act(keys, old, False,
+                                    offset_dict, action_classes, msf,
+                                    gone_fmris, fmri_dict)
 
                                 # Now update 'new' with all actions from the
                                 # action cache which are staying on the system,
                                 # and could conflict with the actions being
                                 # installed.
-                                self.__update_new(new, old, offset_dict,
-                                    action_classes, msf, gone_fmris, fmri_dict)
+                                keys = set(old.iterkeys())
+                                self.__update_act(keys, new, True,
+                                    offset_dict, action_classes, msf,
+                                    gone_fmris | changing_fmris, fmri_dict)
 
                         self.__check_conflicts(new, old, action_classes, ns,
                             errs)
@@ -2334,6 +2484,16 @@
                 self.merge_actions()
                 self.compile_release_notes()
 
+                fmri_updates = [
+                        (p.origin_fmri, p.destination_fmri)
+                        for p in self.pd.pkg_plans
+                ]
+                if not self.pd._li_pkg_updates and fmri_updates:
+                        # oops.  the caller requested no package updates and
+                        # we couldn't satisfy that request.
+                        raise api_errors.PlanCreationException(
+                            pkg_updates_required=fmri_updates)
+
                 for p in self.pd.pkg_plans:
                         cpbytes, pbytes = p.get_bytes_added()
                         if p.destination_fmri:
@@ -3377,6 +3537,20 @@
                 self.pd.update_actions.sort(key=addsort)
                 self.pd.install_actions.sort(key=addsort)
 
+                # cleanup pkg_plan objects which don't actually contain any
+                # changes
+                for p in list(self.pd.pkg_plans):
+                        if p.origin_fmri != p.destination_fmri or \
+                            p.actions.removed or p.actions.changed or \
+                            p.actions.added:
+                                continue
+                        self.pd.pkg_plans.remove(p)
+                        fmri = p.origin_fmri
+                        if (fmri, fmri) in self.pd._fmri_changes:
+                                self.pd._fmri_changes.remove(
+                                    (fmri, fmri))
+                        del p
+
                 #
                 # Sort the package plans by fmri to create predictability (and
                 # some sense of order) in the download output; this is not
--- a/src/modules/client/linkedimage/common.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/modules/client/linkedimage/common.py	Wed Aug 07 15:30:45 2013 -0700
@@ -59,6 +59,7 @@
 import pkg.client.pkgplan as pkgplan
 import pkg.client.pkgremote
 import pkg.client.progress as progress
+import pkg.facet
 import pkg.fmri
 import pkg.misc as misc
 import pkg.pkgsubprocess
@@ -107,6 +108,7 @@
 
 # files which contain linked image data
 __DATA_DIR     = "linked"
+PATH_PFACETS    = os.path.join(__DATA_DIR, "linked_pfacets")
 PATH_PPKGS     = os.path.join(__DATA_DIR, "linked_ppkgs")
 PATH_PROP      = os.path.join(__DATA_DIR, "linked_prop")
 PATH_PUBS      = os.path.join(__DATA_DIR, "linked_ppubs")
@@ -409,6 +411,7 @@
                 self.__props = dict()
                 self.__ppkgs = frozenset()
                 self.__ppubs = None
+                self.__pfacets = pkg.facet.Facets()
                 self.__pimg = None
 
                 # variables reset by self.__recursion_init()
@@ -420,6 +423,8 @@
                 self.__path_ppkgs = None
                 self.__path_prop = None
                 self.__path_ppubs = None
+                self.__path_pfacets = None
+                self.__img_insync = True
 
                 # initialize with no properties
                 self.__update_props()
@@ -469,9 +474,20 @@
                 self.__path_ppkgs = os.path.join(imgdir, PATH_PPKGS)
                 self.__path_prop = os.path.join(imgdir, PATH_PROP)
                 self.__path_ppubs = os.path.join(imgdir, PATH_PUBS)
+                self.__path_pfacets = os.path.join(imgdir, PATH_PFACETS)
 
                 # if this isn't a reset, then load data from the image
                 if not old_root:
+                        # the first time around we load non-temporary data (if
+                        # there is any) so that we can audit ourselves and see
+                        # if we're currently in sync.
+                        self.__load(tmp=False)
+                        if self.ischild():
+                                self.__img_insync = self.__insync()
+
+                        # now re-load all the data taking into account any
+                        # temporary new data associated with an in-progress
+                        # operation.
                         self.__load()
 
                 # we're not linked or we're not changing root paths we're done
@@ -513,6 +529,7 @@
                 self.__props = props
                 self.__ppkgs = frozenset()
                 self.__ppubs = None
+                self.__pfacets = pkg.facet.Facets()
                 self.__pimg = None
 
         def __verify_props(self, props):
@@ -707,30 +724,57 @@
                 self.__verify_props(props)
                 return props
 
-        def __load_ondisk_ppkgs(self, tmp=True):
-                """Load linked image parent constraints from disk.
+        def __load_ondisk_pfacets(self, tmp=True):
+                """Load linked image inherited facets from disk.
                 Don't update any internal state.
 
                 'tmp' determines if we should read/write to the official
                 linked image metadata files, or if we should access temporary
                 versions (which have ".<runid>" appended to them."""
 
+                pfacets = misc.EmptyDict
+                path = "%s.%d" % (self.__path_pfacets,
+                    global_settings.client_runid)
+                if tmp and path_exists(path):
+                        pfacets = load_data(path)
+                else:
+                        path = self.__path_pfacets
+                        pfacets = load_data(path, missing_ok=True)
+
+                if pfacets is None:
+                        return None
+
+                rv = pkg.facet.Facets()
+                for k, v in pfacets.iteritems():
+                        # W0212 Access to a protected member
+                        # pylint: disable=W0212
+                        rv._set_inherited(k, v)
+                return rv
+
+        def __load_ondisk_ppkgs(self, tmp=True):
+                """Load linked image parent packages from disk.
+                Don't update any internal state.
+
+                'tmp' determines if we should read/write to the official
+                linked image metadata files, or if we should access temporary
+                versions (which have ".<runid>" appended to them)."""
+
+                fmri_strs = None
                 path = "%s.%d" % (self.__path_ppkgs,
                     global_settings.client_runid)
                 if tmp and path_exists(path):
-                        return frozenset([
-                            pkg.fmri.PkgFmri(str(s))
-                            for s in load_data(path, missing_val=misc.EmptyI)
-                        ])
-
-                path = self.__path_ppkgs
-                if path_exists(path):
-                        return frozenset([
-                            pkg.fmri.PkgFmri(str(s))
-                            for s in load_data(path, missing_val=misc.EmptyI)
-                        ])
-
-                return None
+                        fmri_strs = load_data(path)
+                else:
+                        path = self.__path_ppkgs
+                        fmri_strs = load_data(path, missing_ok=True)
+
+                if fmri_strs is None:
+                        return None
+
+                return frozenset([
+                    pkg.fmri.PkgFmri(str(s))
+                    for s in fmri_strs
+                ])
 
         def __load_ondisk_ppubs(self, tmp=True):
                 """Load linked image parent publishers from disk.
@@ -740,18 +784,18 @@
                 linked image metadata files, or if we should access temporary
                 versions (which have ".<runid>" appended to them."""
 
+                ppubs = None
                 path = "%s.%d" % (self.__path_ppubs,
                     global_settings.client_runid)
                 if tmp and path_exists(path):
-                        return load_data(path)
-
-                path = self.__path_ppubs
-                if path_exists(path):
-                        return load_data(path)
-
-                return None
-
-        def __load(self):
+                        ppubs = load_data(path)
+                else:
+                        path = self.__path_ppubs
+                        ppubs = load_data(path, missing_ok=True)
+
+                return ppubs
+
+        def __load(self, tmp=True):
                 """Load linked image properties and constraints from disk.
                 Update the linked image internal state with the loaded data."""
 
@@ -772,7 +816,7 @@
                 # and the caller will have to specify that they want to ignore
                 # all children to allow the operation to succeed.
                 #
-                props = self.__load_ondisk_props()
+                props = self.__load_ondisk_props(tmp=tmp)
                 if not props and not self.__isparent(ignore_errors=True):
                         # we're not linked
                         return
@@ -792,16 +836,25 @@
 
                 self.__update_props(props)
 
-                ppkgs = self.__load_ondisk_ppkgs()
-                if self.ischild() and ppkgs == None:
-                        _rterr(li=self, err="Constraints data missing.")
-                if self.ischild():
+                if not self.ischild():
+                        return
+
+                # load parent packages. if parent package data is missing just
+                # continue along and hope for the best.
+                ppkgs = self.__load_ondisk_ppkgs(tmp=tmp)
+                if ppkgs is not None:
                         self.__ppkgs = ppkgs
 
+                # load inherited facets. if inherited facet data is missing
+                # just continue along and hope for the best.
+                pfacets = self.__load_ondisk_pfacets(tmp=tmp)
+                if pfacets is not None:
+                        self.__pfacets = pfacets
+
                 # load parent publisher data. if publisher data is missing
                 # continue along and we'll just skip the publisher checks,
                 # it's better than failing and preventing any image updates.
-                self.__ppubs = self.__load_ondisk_ppubs()
+                self.__ppubs = self.__load_ondisk_ppubs(tmp=tmp)
 
         @staticmethod
         def __validate_prop_recurse(v):
@@ -886,6 +939,21 @@
                 differs then there is stuff to do since the new state needs
                 to be saved to disk."""
 
+                # check if we're not a linked image.
+                if not self.isparent() and not self.ischild():
+                        # if any linked image metadata files exist they need
+                        # to be deleted.
+                        paths = [
+                            self.__path_pfacets,
+                            self.__path_ppkgs,
+                            self.__path_ppubs,
+                            self.__path_prop,
+                        ]
+                        for path in paths:
+                                if path_exists(path):
+                                        return False
+                        return True
+
                 # compare in-memory and on-disk properties
                 li_ondisk_props = self.__load_ondisk_props(tmp=False)
                 if li_ondisk_props == None:
@@ -898,10 +966,35 @@
                 if li_ondisk_props != li_inmemory_props:
                         return False
 
-                # compare in-memory and on-disk constraints
+                # linked image metadata files with inherited data
+                paths = [
+                    self.__path_pfacets,
+                    self.__path_ppkgs,
+                    self.__path_ppubs,
+                ]
+
+                # check if we're just a parent image.
+                if not self.ischild():
+                        # parent images only have properties.  if any linked
+                        # image metadata files that contain inherited
+                        # information exist they need to be deleted.
+                        for path in paths:
+                                if path_exists(path):
+                                        return False
+                        return True
+
+                # if we're missing any metadata files then there's work to do
+                for path in paths:
+                        if not path_exists(path):
+                                return False
+
+                # compare in-memory and on-disk inherited facets
+                li_ondisk_pfacets = self.__load_ondisk_pfacets(tmp=False)
+                if self.__pfacets != li_ondisk_pfacets:
+                        return False
+
+                # compare in-memory and on-disk parent packages
                 li_ondisk_ppkgs = self.__load_ondisk_ppkgs(tmp=False)
-                if li_ondisk_ppkgs == None:
-                        li_ondisk_ppkgs = frozenset()
                 if self.__ppkgs != li_ondisk_ppkgs:
                         return False
 
@@ -912,34 +1005,6 @@
 
                 return True
 
-        def get_pubs(self, img=None):
-                """Return publisher information for the specified image.  If
-                no image is specified we return publisher information for the
-                current image.
-
-                Publisher information is returned in a sorted list of lists
-                of the format:
-                        <publisher name>, <sticky>
-
-                Where:
-                        <publisher name> is a string
-                        <sticky> is a boolean
-
-                The tuples are sorted by publisher rank.
-                """
-
-                # default to ourselves
-                if img == None:
-                        img = self.__img
-
-                # get a sorted list of the images publishers
-                pubs = img.get_sorted_publishers(inc_disabled=False)
-
-                rv = []
-                for p in pubs:
-                        rv.append([str(p), p.sticky])
-                return rv
-
         def pubcheck(self):
                 """If we're a child image's, verify that the parent image
                 publisher configuration is a subset of the child images
@@ -962,7 +1027,7 @@
                 if self.__img.cfg.get_policy("use-system-repo"):
                         return
 
-                pubs = self.get_pubs()
+                pubs = get_pubs(self.__img)
                 ppubs = self.__ppubs
 
                 if ppubs == None:
@@ -982,7 +1047,7 @@
                         raise apx.PlanCreationException(
                             linked_pub_error=(pubs, ppubs))
 
-        def syncmd_from_parent(self, api_op=None):
+        def __syncmd_from_parent(self):
                 """Update linked image constraint, publisher data, and
                 state from our parent image."""
 
@@ -999,41 +1064,35 @@
                         path = self.__props[PROP_PARENT_PATH]
                         self.__pimg = self.__init_pimg(path)
 
-                # generate new constraints
-                cati = self.__pimg.get_catalog(self.__img.IMG_CATALOG_INSTALLED)
-                ppkgs = frozenset(cati.fmris())
-
-                # generate new publishers
-                ppubs = self.get_pubs(img=self.__pimg)
-
-                # check if anything has changed
-                need_sync = False
-
-                if self.__ppkgs != ppkgs:
-                        # we have new constraints
-                        self.__ppkgs = ppkgs
-                        need_sync = True
-
-                if self.__ppubs != ppubs:
-                        # parent has new publishers
-                        self.__ppubs = ppubs
-                        need_sync = True
-
-                if not need_sync:
-                        # nothing changed
-                        return
-
-                # if we're not planning an image attach operation then write
-                # the linked image metadata to disk.
-                if api_op != pkgdefs.API_OP_ATTACH:
-                        self.syncmd()
+                # get metadata from our parent image
+                self.__ppubs = get_pubs(self.__pimg)
+                self.__ppkgs = get_packages(self.__pimg)
+                self.__pfacets = get_inheritable_facets(self.__pimg)
+
+        def syncmd_from_parent(self, catch_exception=False):
+                """Update linked image constraint, publisher data, and state
+                from our parent image.  If catch_exception is true catch any
+                linked image exceptions and pack them up in a linked image
+                return value tuple."""
+
+                try:
+                        self.__syncmd_from_parent()
+                except apx.LinkedImageException, e:
+                        if not catch_exception:
+                                raise e
+                        return LI_RVTuple(e.lix_exitrv, e, None)
+                return
 
         def syncmd(self):
                 """Write in-memory linked image state to disk."""
 
                 # create a list of metadata file paths
-                paths = [self.__path_ppkgs, self.__path_prop,
-                    self.__path_ppubs]
+                paths = [
+                    self.__path_pfacets,
+                    self.__path_ppkgs,
+                    self.__path_ppubs,
+                    self.__path_prop,
+                ]
 
                 # cleanup any temporary files
                 for path in paths:
@@ -1055,11 +1114,14 @@
                 save_data(self.__path_prop, props)
 
                 if not self.ischild():
-                        # if we're not a child we don't have constraints
+                        # if we're not a child we don't have parent data
+                        path_unlink(self.__path_pfacets, noent_ok=True)
                         path_unlink(self.__path_ppkgs, noent_ok=True)
+                        path_unlink(self.__path_ppubs, noent_ok=True)
                         return
 
                 # we're a child so save our latest constraints
+                save_data(self.__path_pfacets, self.__pfacets)
                 save_data(self.__path_ppkgs, self.__ppkgs)
                 save_data(self.__path_ppubs, self.__ppubs)
 
@@ -1151,6 +1213,10 @@
                 for lin in lin_list:
                         self.__verify_child_name(lin, raise_except=True)
 
+        def inherited_facets(self):
+                """Facets inherited from our parent image."""
+                return self.__pfacets
+
         def parent_fmris(self):
                 """A set of the fmris installed in our parent image."""
 
@@ -1399,7 +1465,7 @@
                                 return False
                 return True
 
-        def audit_self(self, li_parent_sync=True):
+        def audit_self(self, latest_md=True):
                 """If the current image is a child image, this function
                 audits the current image to see if it's in sync with its
                 parent."""
@@ -1408,14 +1474,17 @@
                         e = self.__apx_not_child()
                         return LI_RVTuple(pkgdefs.EXIT_OOPS, e, None)
 
-                try:
-                        if li_parent_sync:
-                                # try to refresh linked image constraints from
-                                # the parent image.
-                                self.syncmd_from_parent()
-
-                except apx.LinkedImageException, e:
-                        return LI_RVTuple(e.lix_exitrv, e, None)
+                if not latest_md:
+                        # we don't use the latest linked image metadata.
+                        # instead return cached insync value which was
+                        # computed using the initial linked image metadata
+                        # that we loaded from disk.
+                        if not self.__img_insync:
+                                e = apx.LinkedImageException(
+                                    child_diverged=self.child_name)
+                                return LI_RVTuple(pkgdefs.EXIT_DIVERGED, e,
+                                    None)
+                        return LI_RVTuple(pkgdefs.EXIT_OK, None, None)
 
                 if not self.__insync():
                         e = apx.LinkedImageException(
@@ -1424,6 +1493,16 @@
 
                 return LI_RVTuple(pkgdefs.EXIT_OK, None, None)
 
+        def insync(self, latest_md=True):
+                """A convenience wrapper for audit_self().  Note that we
+                consider non-child images as always in sync and ignore
+                any runtime errors."""
+
+                rv = self.image.linked.audit_self(latest_md=latest_md)[0]
+                if rv == pkgdefs.EXIT_DIVERGED:
+                        return False
+                return True
+
         @staticmethod
         def __rvdict2rv(rvdict, rv_map=None):
                 """Internal helper function that takes a dictionary returned
@@ -1711,8 +1790,8 @@
                     _progtrack=progtrack,
                     _failfast=False,
                     _expect_plan=True,
+                    _syncmd_tmp=True,
                     accept=accept,
-                    li_attach_sync=True,
                     li_md_only=li_md_only,
                     li_pkg_updates=li_pkg_updates,
                     noexecute=noexecute,
@@ -1761,7 +1840,7 @@
                     _failfast=False))
                 return rvdict
 
-        def sync_children(self, lin_list, accept=False, li_attach_sync=False,
+        def sync_children(self, lin_list, accept=False,
             li_md_only=False, li_pkg_updates=True, progtrack=None,
             noexecute=False, refresh_catalogs=True, reject_list=misc.EmptyI,
             show_licenses=False, update_index=True):
@@ -1775,6 +1854,10 @@
 
                 lic_dict = self.__children_init(lin_list=lin_list)
 
+                _syncmd_tmp = True
+                if not noexecute and li_md_only:
+                        _syncmd_tmp = False
+
                 rvdict = {}
                 list(self.__children_op(
                     _pkg_op=pkgdefs.PKG_OP_SYNC,
@@ -1783,8 +1866,8 @@
                     _progtrack=progtrack,
                     _failfast=False,
                     _expect_plan=True,
+                    _syncmd_tmp=_syncmd_tmp,
                     accept=accept,
-                    li_attach_sync=li_attach_sync,
                     li_md_only=li_md_only,
                     li_pkg_updates=li_pkg_updates,
                     noexecute=noexecute,
@@ -1794,7 +1877,8 @@
                     update_index=update_index))
                 return rvdict
 
-        def detach_children(self, lin_list, force=False, noexecute=False):
+        def detach_children(self, lin_list, force=False, noexecute=False,
+            li_md_only=False, li_pkg_updates=True):
                 """Detach one or more children from the current image. This
                 operation results in the removal of any constraint package
                 from the child images."""
@@ -1827,6 +1911,8 @@
                     _rvdict=rvdict,
                     _progtrack=progress.NullProgressTracker(),
                     _failfast=False,
+                    li_md_only=li_md_only,
+                    li_pkg_updates=li_pkg_updates,
                     noexecute=noexecute))
 
                 # if any of the children successfully detached, then we want
@@ -1860,9 +1946,9 @@
 
                 return rvdict
 
-        @staticmethod
-        def __children_op(_pkg_op, _lic_list, _rvdict, _progtrack, _failfast,
-            _expect_plan=False, _ignore_syncmd_nop=True, _pd=None, **kwargs):
+        def __children_op(self, _pkg_op, _lic_list, _rvdict, _progtrack,
+            _failfast, _expect_plan=False, _ignore_syncmd_nop=True,
+            _syncmd_tmp=False, _pd=None, **kwargs):
                 """An iterator function which performs a linked image
                 operation on multiple children in parallel.
 
@@ -1890,6 +1976,10 @@
                 always recurse into a child even if the linked image meta data
                 isn't changing.
 
+                '_syncmd_tmp' a boolean that indicates if we should write
+                linked image metadata in a temporary location in child images,
+                or just overwrite any existing data.
+
                 '_pd' a PlanDescription pointer."""
 
                 if _lic_list:
@@ -1908,12 +1998,27 @@
                 else:
                         concurrency = global_settings.client_concurrency
 
+                # If we have a plan for the current image that means linked
+                # image metadata is probably changing so we always save it to
+                # a temporary file (and we don't overwrite the existing
+                # metadata until after we execute the plan).
+                if _pd is not None:
+                        _syncmd_tmp = True
+
+                # get parent metadata common to all child images
+                _pmd = None
+                if _pkg_op != pkgdefs.PKG_OP_DETACH:
+                        ppubs = get_pubs(self.__img)
+                        ppkgs = get_packages(self.__img, pd=_pd)
+                        pfacets = get_inheritable_facets(self.__img, pd=_pd)
+                        _pmd = (ppubs, ppkgs, pfacets)
+
                 # setup operation for each child
                 lic_setup = []
                 for lic in _lic_list:
                         try:
-                                lic.child_op_setup(_pkg_op, _progtrack,
-                                    _ignore_syncmd_nop, _pd, **kwargs)
+                                lic.child_op_setup(_pkg_op, _pmd, _progtrack,
+                                    _ignore_syncmd_nop, _syncmd_tmp, **kwargs)
                                 lic_setup.append(lic)
                         except apx.LinkedImageException, e:
                                 _rvdict[lic.child_name] = \
@@ -2186,7 +2291,7 @@
                 # changing.  (this would be required for recursive operations,
                 # update operations, etc.)
                 _ignore_syncmd_nop = True
-                if pkg_op == pkgdefs.API_OP_SYNC:
+                if pd.child_op_implicit:
                         # the exception is if we're doing an implicit sync.
                         # to improve performance we assume the child is
                         # already in sync, so if its linked image metadata
@@ -2241,12 +2346,16 @@
                 # of specific packages, etc, then when we recurse we'll do a
                 # sync in the child.
                 #
+                implicit = False
                 if api_op == pkgdefs.API_OP_UPDATE and not \
                     api_kwargs["pkgs_update"]:
                         pkg_op = pkgdefs.PKG_OP_UPDATE
+                elif api_op == pkgdefs.API_OP_SYNC:
+                        pkg_op = pkgdefs.PKG_OP_SYNC
                 else:
                         pkg_op = pkgdefs.PKG_OP_SYNC
-                return pkg_op
+                        implicit = True
+                return pkg_op, implicit
 
         @staticmethod
         def __recursion_args(pd, refresh_catalogs, update_index, api_kwargs):
@@ -2292,7 +2401,8 @@
                 api_op = pd.plan_type
 
                 # update the plan arguments
-                pd.child_op = self.__recursion_op(api_op, api_kwargs)
+                pd.child_op, pd.child_op_implicit = \
+                    self.__recursion_op(api_op, api_kwargs)
                 pd.child_kwargs = self.__recursion_args(pd,
                     refresh_catalogs, update_index, api_kwargs)
                 pd.children_ignored = self.__lic_ignore
@@ -2318,6 +2428,7 @@
                 pd.li_props = self.__props
                 pd.li_ppkgs = self.__ppkgs
                 pd.li_ppubs = self.__ppubs
+                pd.li_pfacets = self.__pfacets
 
         def setup_plan(self, pd):
                 """Reload a previously created plan."""
@@ -2326,6 +2437,7 @@
                 self.__update_props(pd.li_props)
                 self.__ppubs = pd.li_ppubs
                 self.__ppkgs = pd.li_ppkgs
+                self.__pfacets = pd.li_pfacets
 
                 # now initialize our recursion state, this involves allocating
                 # handles to operate on children.  we don't need handles for
@@ -2521,6 +2633,7 @@
                 self.__path_ppkgs = os.path.join(imgdir, PATH_PPKGS)
                 self.__path_prop = os.path.join(imgdir, PATH_PROP)
                 self.__path_ppubs = os.path.join(imgdir, PATH_PUBS)
+                self.__path_pfacets = os.path.join(imgdir, PATH_PFACETS)
 
                 # initialize a linked image child plugin
                 self.__plugin = \
@@ -2589,37 +2702,29 @@
 
                 return True
 
-        def __push_ppkgs(self, tmp=False, test=False, pd=None):
+        def __push_ppkgs(self, ppkgs, tmp=False, test=False):
                 """Sync linked image parent constraint data to a child image.
 
                 'tmp' determines if we should read/write to the official
                 linked image metadata files, or if we should access temporary
                 versions (which have ".<runid>" appended to them."""
 
-                # there has to be an image plan to export
-                cati = self.__img.get_catalog(self.__img.IMG_CATALOG_INSTALLED)
-                ppkgs = set(cati.fmris())
-
-                if pd is not None:
-                        # if there's an image plan the we need to update the
-                        # installed packages based on that plan.
-                        for src, dst in pd.plan_desc:
-                                if src == dst:
-                                        continue
-                                if src:
-                                        assert src in ppkgs
-                                        ppkgs -= set([src])
-                                if dst:
-                                        assert dst not in ppkgs
-                                        ppkgs |= set([dst])
-
-                # paranoia
-                ppkgs = frozenset(ppkgs)
-
-                # save the planned cips
+                # save the planned parent packages
                 return self.__push_data(self.child_path, self.__path_ppkgs,
                     ppkgs, tmp, test)
 
+        def __push_pfacets(self, pfacets, tmp=False, test=False):
+                """Sync linked image parent facet data to a child image.
+
+                'tmp' determines if we should read/write to the official
+                linked image metadata files, or if we should access temporary
+                versions (which have ".<runid>" appended to them)."""
+
+                # save the planned parent facets
+                return self.__push_data(self.child_path, self.__path_pfacets,
+                    pfacets, tmp, test)
+
+
         def __push_props(self, tmp=False, test=False):
                 """Sync linked image properties data to a child image.
 
@@ -2639,40 +2744,46 @@
                 return self.__push_data(self.child_path, self.__path_prop,
                     props, tmp, test)
 
-        def __push_ppubs(self, tmp=False, test=False):
+        def __push_ppubs(self, ppubs, tmp=False, test=False):
                 """Sync linked image parent publisher data to a child image.
 
                 'tmp' determines if we should read/write to the official
                 linked image metadata files, or if we should access temporary
                 versions (which have ".<runid>" appended to them."""
 
-                ppubs = self.__linked.get_pubs()
                 return self.__push_data(self.child_path, self.__path_ppubs,
                     ppubs, tmp, test)
 
-        def __syncmd(self, tmp=False, test=False, pd=None):
+        def __syncmd(self, pmd, tmp=False, test=False):
                 """Sync linked image data to a child image.
 
                 'tmp' determines if we should read/write to the official
                 linked image metadata files, or if we should access temporary
                 versions (which have ".<runid>" appended to them."""
 
-                if pd:
-                        tmp = True
-
-                ppkgs_updated = self.__push_ppkgs(tmp, test, pd=pd)
+                # unpack parent metadata tuple
+                ppubs, ppkgs, pfacets = pmd
+
+                ppkgs_updated = self.__push_ppkgs(ppkgs, tmp, test)
                 props_updated = self.__push_props(tmp, test)
-                pubs_updated = self.__push_ppubs(tmp, test)
-
-                return (props_updated or ppkgs_updated or pubs_updated)
-
-        def __child_op_setup_syncmd(self, ignore_syncmd_nop=True,
-            tmp=False, test=False, pd=None, stage=pkgdefs.API_STAGE_DEFAULT):
+                pubs_updated = self.__push_ppubs(ppubs, tmp, test)
+                pfacets_updated = self.__push_pfacets(pfacets, tmp, test)
+
+                return (props_updated or ppkgs_updated or pubs_updated or
+                    pfacets_updated)
+
+        def __child_op_setup_syncmd(self, pmd, ignore_syncmd_nop=True,
+            tmp=False, test=False, stage=pkgdefs.API_STAGE_DEFAULT):
                 """Prepare to perform an operation on a child image by syncing
                 the latest linked image data to that image.  As part of this
                 operation, if we discover that the meta data hasn't changed we
                 may report back that there is nothing to do (EXIT_NOP).
 
+                'pmd' is a tuple that contains parent metadata that we will
+                sync to the child image.  Note this is not all the metadata
+                that we will sync, just the set which is common to all
+                children.
+
                 'ignore_syncmd_nop' a boolean that indicates if we should
                 always recurse into a child even if the linked image meta data
                 isn't changing.
@@ -2685,23 +2796,16 @@
                 image meta data, instead we should just test to see if the
                 meta data is changing.
 
-                'pd' an optional plan description object.  this plan
-                description describes changes that will be made to the parent
-                image.  if this is supplied then we derive the meta data that
-                we write into the child from the planned parent image state
-                (instead of the current parent image state).
-
                 'stage' indicates which stage of execution we should be
                 performing on a child image."""
 
-                # we don't actually update metadata during other stages of
-                # operation
+                # we don't update metadata during all stages of operation
                 if stage not in [
                     pkgdefs.API_STAGE_DEFAULT, pkgdefs.API_STAGE_PLAN]:
                         return True
 
                 try:
-                        updated = self.__syncmd(tmp=tmp, test=test, pd=pd)
+                        updated = self.__syncmd(pmd, tmp=tmp, test=test)
                 except apx.LinkedImageException, e:
                         self.__child_op_rvtuple = \
                             LI_RVTuple(e.lix_exitrv, e, None)
@@ -2720,9 +2824,9 @@
                     LI_RVTuple(pkgdefs.EXIT_NOP, None, None)
                 return False
 
-        def __child_setup_sync(self, _progtrack, _ignore_syncmd_nop, _pd,
+        def __child_setup_sync(self, _pmd, _progtrack, _ignore_syncmd_nop,
+            _syncmd_tmp,
             accept=False,
-            li_attach_sync=False,
             li_md_only=False,
             li_pkg_updates=True,
             noexecute=False,
@@ -2735,9 +2839,6 @@
                 linked image metadata in the child and then possibly recursing
                 into the child to actually update packages.
 
-                'li_attach_sync' indicates if this sync is part of an attach
-                operation.
-
                 For descriptions of parameters please see the descriptions in
                 api.py`gen_plan_*"""
 
@@ -2749,8 +2850,7 @@
                         # we don't support updating packages in the parent
                         # during attach metadata only sync.
                         #
-                        assert not _pd
-                        if not self.__child_op_setup_syncmd(
+                        if not self.__child_op_setup_syncmd(_pmd,
                             ignore_syncmd_nop=False,
                             test=noexecute, stage=stage):
                                 # the update failed
@@ -2772,10 +2872,9 @@
                 # we don't support updating packages in the parent
                 # during attach.
                 #
-                assert not li_attach_sync or _pd is None
-                if not self.__child_op_setup_syncmd(
+                if not self.__child_op_setup_syncmd(_pmd,
                     ignore_syncmd_nop=_ignore_syncmd_nop,
-                    tmp=li_attach_sync, stage=stage, pd=_pd):
+                    tmp=_syncmd_tmp, stage=stage):
                         # the update failed or the metadata didn't change
                         return
 
@@ -2805,7 +2904,8 @@
                     update_index=update_index,
                     verbose=global_settings.client_output_verbose)
 
-        def __child_setup_update(self, _progtrack, _ignore_syncmd_nop, _pd,
+        def __child_setup_update(self, _pmd, _progtrack, _ignore_syncmd_nop,
+            _syncmd_tmp,
             accept=False,
             force=False,
             noexecute=False,
@@ -2817,8 +2917,9 @@
                 """Prepare to update a child image."""
 
                 # first sync the metadata
-                if not self.__child_op_setup_syncmd(
-                    ignore_syncmd_nop=_ignore_syncmd_nop, pd=_pd, stage=stage):
+                if not self.__child_op_setup_syncmd(_pmd,
+                    ignore_syncmd_nop=_ignore_syncmd_nop,
+                    tmp=_syncmd_tmp, stage=stage):
                         # the update failed or the metadata didn't change
                         return
 
@@ -2845,23 +2946,27 @@
                     update_index=update_index,
                     verbose=global_settings.client_output_verbose)
 
-        def __child_setup_detach(self, _progtrack, noexecute=False):
+        def __child_setup_detach(self, _progtrack, li_md_only=False,
+            li_pkg_updates=True, noexecute=False):
                 """Prepare to detach a child image."""
 
                 self.__pkg_remote.setup(self.child_path,
                     pkgdefs.PKG_OP_DETACH,
                     force=True,
+                    li_md_only=li_md_only,
+                    li_pkg_updates=li_pkg_updates,
                     li_target_all=False,
                     li_target_list=[],
                     noexecute=noexecute,
                     quiet=global_settings.client_output_quiet,
                     verbose=global_settings.client_output_verbose)
 
-        def __child_setup_pubcheck(self):
+        def __child_setup_pubcheck(self, _pmd):
                 """Prepare to a check if a child's publishers are in sync."""
 
                 # first sync the metadata
-                if not self.__child_op_setup_syncmd():
+                # a pubcheck should never update persistent meta data
+                if not self.__child_op_setup_syncmd(_pmd, tmp=True):
                         # the update failed
                         return
 
@@ -2869,12 +2974,12 @@
                 self.__pkg_remote.setup(self.child_path,
                     pkgdefs.PKG_OP_PUBCHECK)
 
-        def __child_setup_audit(self):
+        def __child_setup_audit(self, _pmd):
                 """Prepare to a child image to see if it's in sync with its
                 constraints."""
 
                 # first sync the metadata
-                if not self.__child_op_setup_syncmd():
+                if not self.__child_op_setup_syncmd(_pmd, tmp=True):
                         # the update failed
                         return
 
@@ -2893,25 +2998,25 @@
                 self.__pkg_remote.abort()
                 self.__child_op_rvtuple = None
 
-        def child_op_setup(self, _pkg_op, _progtrack, _ignore_syncmd_nop, _pd,
-            **kwargs):
+        def child_op_setup(self, _pkg_op, _pmd, _progtrack, _ignore_syncmd_nop,
+            _syncmd_tmp, **kwargs):
                 """Public interface to setup an operation that we'd like to
                 perform on a child image."""
 
                 assert self.__child_op_rvtuple is None
 
                 if _pkg_op == pkgdefs.PKG_OP_AUDIT_LINKED:
-                        self.__child_setup_audit(**kwargs)
+                        self.__child_setup_audit(_pmd, **kwargs)
                 elif _pkg_op == pkgdefs.PKG_OP_DETACH:
                         self.__child_setup_detach(_progtrack, **kwargs)
                 elif _pkg_op == pkgdefs.PKG_OP_PUBCHECK:
-                        self.__child_setup_pubcheck(**kwargs)
+                        self.__child_setup_pubcheck(_pmd, **kwargs)
                 elif _pkg_op == pkgdefs.PKG_OP_SYNC:
-                        self.__child_setup_sync(_progtrack,
-                            _ignore_syncmd_nop, _pd, **kwargs)
+                        self.__child_setup_sync(_pmd, _progtrack,
+                            _ignore_syncmd_nop, _syncmd_tmp, **kwargs)
                 elif _pkg_op == pkgdefs.PKG_OP_UPDATE:
-                        self.__child_setup_update(_progtrack,
-                            _ignore_syncmd_nop, _pd, **kwargs)
+                        self.__child_setup_update(_pmd, _progtrack,
+                            _ignore_syncmd_nop, _syncmd_tmp, **kwargs)
                 else:
                         raise RuntimeError(
                             "Unsupported package client op: %s" % _pkg_op)
@@ -3040,6 +3145,177 @@
 
 
 # ---------------------------------------------------------------------------
+# Interfaces to obtain linked image metadata from an image
+#
+def get_pubs(img):
+        """Return publisher information for the specified image.
+
+        Publisher information is returned in a sorted list of lists
+        of the format:
+                <publisher name>, <sticky>
+
+        Where:
+                <publisher name> is a string
+                <sticky> is a boolean
+
+        The list is sorted by publisher rank.
+        """
+
+        return [
+            [str(p), p.sticky]
+            for p in img.get_sorted_publishers(inc_disabled=False)
+        ]
+
+def get_packages(img, pd=None):
+        """Figure out the current (or planned) list of packages in img."""
+
+        ppkgs = set(img.get_catalog(img.IMG_CATALOG_INSTALLED).fmris())
+
+        # if there's an image plan then we need to update the installed
+        # packages based on that plan.
+        if pd is not None:
+                for src, dst in pd.plan_desc:
+                        if src == dst:
+                                continue
+                        if src:
+                                assert src in ppkgs
+                                ppkgs -= set([src])
+                        if dst:
+                                assert dst not in ppkgs
+                                ppkgs |= set([dst])
+
+        # paranoia
+        return frozenset(ppkgs)
+
+def get_inheritable_facets(img, pd=None):
+        """Get Facets from an image that a child should inherit.
+
+        We only want to sync facets which affect packages that have parent
+        dependencies on themselves.  In practice this essentially limits us to
+        "facet.version-lock.*" facets."""
+
+        # get installed (or planned) parent packages and facets
+        ppkgs = get_packages(img, pd=pd)
+        facets = img.cfg.facets
+        if pd is not None and pd.new_facets is not None:
+                facets = pd.new_facets
+
+        # create a packages dictionary indexed by package stem.
+        ppkgs_dict = dict([
+                (pfmri.pkg_name, pfmri)
+                for pfmri in ppkgs
+        ])
+
+        #
+        # iterate through all installed (or planned) package incorporation
+        # dependency actions and find those that are affected by image facets.
+        #
+        # we don't check for package-wide facets here because they don't do
+        # anything.  (ie, facets defined via "set" actions in a package have
+        # no effect on other actions within that package.)
+        #
+        faceted_deps = dict()
+        cat = img.get_catalog(img.IMG_CATALOG_KNOWN)
+        for pfmri in ppkgs:
+                for act in cat.get_entry_actions(pfmri, [cat.DEPENDENCY]):
+                        # we're only interested in incorporate dependencies
+                        if act.name != "depend" or \
+                            act.attrs["type"] != "incorporate":
+                                continue
+
+                        # check if any image facets affect this dependency
+                        # W0212 Access to a protected member
+                        # pylint: disable=W0212
+                        matching_facets = facets._action_match(act)
+                        # pylint: enable=W0212
+                        if not matching_facets:
+                                continue
+
+                        # if all the matching facets are true we don't care
+                        # about the match.
+                        if set([i[1] for i in matching_facets]) == set([True]):
+                                continue
+
+                        # save this set of facets.
+                        faceted_deps[act] = matching_facets
+
+        #
+        # For each faceted incorporation dependency, check if it affects a
+        # package that has parent dependencies on itself.  This is really a
+        # best effort in that we don't follow package renames or obsoletions,
+        # etc.
+        #
+        # To limit the number of packages we inspect, we'll try to match the
+        # incorporation dependency fmri target packages by stem to packages
+        # which are installed (or planned) within the parent image.  This
+        # allows us to quickly get a fully qualified fmri and check against a
+        # package for which we have already downloaded a manifest.
+        #
+        # If we can't match the dependency fmri package stem against packages
+        # installed (or planned) in the parent image, we don't bother
+        # searching for allowable packages in the catalog, because even if we
+        # found them in the catalog and they did have a parent dependency,
+        # they'd all still be uninstallable in any children because there
+        # would be no way to satisfy the parent dependency.  (as we already
+        # stated the package is not installed in the parent.)
+        #
+        faceted_linked_deps = dict()
+        for act in faceted_deps:
+                for fmri in act.attrlist("fmri"):
+                        # the build_release specified below ("5.11") doesn't
+                        # matter since we only create a PkgFmri object so we
+                        # can reference pkg_name
+                        pfmri = pkg.fmri.PkgFmri(fmri, "5.11")
+                        pfmri = ppkgs_dict.get(pfmri.pkg_name, None)
+                        if pfmri is None:
+                                continue
+
+                        # check if this package has a dependency on itself in
+                        # its parent image.
+                        for act2 in cat.get_entry_actions(pfmri,
+                            [cat.DEPENDENCY]):
+                                if act2.name != "depend" or \
+                                    act2.attrs["type"] != "parent":
+                                        continue
+                                if pkg.actions.depend.DEPEND_SELF not in \
+                                    act2.attrlist("fmri"):
+                                        continue
+                                faceted_linked_deps[act] = faceted_deps[act]
+                                break
+        del faceted_deps
+
+        #
+        # Create a set of all facets which affect incorporation dependencies
+        # on synced packages.
+        #
+        # Note that we can't limit ourselves to only passing on facets that
+        # affect dependencies which have been disabled.  Doing this could lead
+        # to incorrect results because facets allow for pattern matching.  So
+        # for example say we had the following dependencies on synced
+        # packages:
+        #
+        #    depend type=incorporation fmri=some_synced_pkg1 facet.123456=true
+        #    depend type=incorporation fmri=some_synced_pkg2 facet.456789=true
+        #
+        # and the following image facets:
+        #
+        #    facet.123456 = True
+        #    facet.*456* = False
+        #
+        # if we only passed through facets which affected disabled dependencies
+        # we'd just pass through "facet.*456*", but this would result in
+        # disabling both dependencies above, not just the second dependency.
+        #
+        pfacets = pkg.facet.Facets()
+        for facets in faceted_linked_deps.values():
+                for k, v in facets:
+                        # W0212 Access to a protected member
+                        # pylint: disable=W0212
+                        pfacets._set_inherited(k, v)
+
+        return pfacets
+
+# ---------------------------------------------------------------------------
 # Utility Functions
 #
 def save_data(path, data, root="/"):
@@ -3068,12 +3344,12 @@
                 # pylint: disable=W0212
                 raise apx._convert_error(e)
 
-def load_data(path, missing_val=None):
+def load_data(path, missing_ok=False):
         """Load JSON encoded linked image metadata from a file."""
 
         try:
-                if (missing_val != None) and not path_exists(path):
-                        return missing_val
+                if missing_ok and not path_exists(path):
+                        return None
                 fobj = open(path)
                 data = json.load(fobj, encoding="utf-8",
                     object_hook=pkg.client.linkedimage.PkgDecoder)
--- a/src/modules/client/options.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/modules/client/options.py	Wed Aug 07 15:30:45 2013 -0700
@@ -688,6 +688,7 @@
     opts_table_beopts + \
     opts_table_concurrency + \
     opts_table_li_ignore + \
+    opts_table_li_no_psync + \
     opts_table_no_index + \
     opts_table_nqv + \
     opts_table_parsable + \
@@ -702,6 +703,8 @@
 
 opts_detach_linked = \
     opts_table_force + \
+    opts_table_li_md_only + \
+    opts_table_li_no_pkg_updates + \
     opts_table_li_target + \
     opts_table_nqv + \
     []
--- a/src/modules/client/pkg_solver.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/modules/client/pkg_solver.py	Wed Aug 07 15:30:45 2013 -0700
@@ -361,7 +361,9 @@
                 if relax_all:
                         relax_pkgs = self.__installed_pkgs
                 else:
-                        relax_pkgs = proposed_pkgs
+                        relax_pkgs = proposed_pkgs | \
+                            self.__relax_parent_self_constrained(excludes,
+                                ignore_inst_parent_deps)
 
                 inc_list, con_lists = self.__get_installed_unbound_inc_list(
                     relax_pkgs, excludes=excludes)
@@ -1635,6 +1637,35 @@
                             (dtype, fmri)),
                             matching)
 
+        def __relax_parent_self_constrained(self, excludes, \
+            ignore_inst_parent_deps):
+                """If we're a child image then we need to relax packages
+                that are dependent upon themselves in the parent image.  This
+                is necessary to keep those packages in sync."""
+
+                relax_pkgs = set()
+
+                # check if we're a child image.
+                if self.__parent_pkgs is None:
+                        return relax_pkgs
+
+                # if we're ignoring parent dependencies there is no reason to
+                # relax install-holds in packages constrained by those
+                # dependencies.
+                if ignore_inst_parent_deps:
+                        return relax_pkgs
+
+                for f in self.__installed_fmris:
+                        for da in self.__get_dependency_actions(f, excludes):
+                                if da.attrs["type"] != "parent":
+                                        continue
+                                if pkg.actions.depend.DEPEND_SELF in \
+                                    da.attrlist("fmri"):
+                                        relax_pkgs.add(f.pkg_name)
+                                        break
+
+                return relax_pkgs
+
         def __generate_dependency_errors(self, fmri_list, excludes=EmptyI):
                 """ Returns a list of strings describing why fmris cannot
                 be installed, or returns an empty list if installation
--- a/src/modules/client/plandesc.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/modules/client/plandesc.py	Wed Aug 07 15:30:45 2013 -0700
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 """
@@ -112,17 +112,16 @@
                     "implementation-version": pkg.version.Version,
                 }
             },
-            "_changed_facets": pkg.facet.Facets,
             "_fmri_changes": [ ( pkg.fmri.PkgFmri, pkg.fmri.PkgFmri ) ],
             "_new_avoid_obs": ( set(), set() ),
-            "_new_facets": pkg.facet.Facets,
             "_new_mediators": collections.defaultdict(set, {
                 str: {
                     "version": pkg.version.Version,
                     "implementation-version": pkg.version.Version,
                 }
             }),
-            "_removed_facets": set(),
+            "_old_facets": pkg.facet.Facets,
+            "_new_facets": pkg.facet.Facets,
             "_rm_aliases": { str: set() },
             "added_groups": { str: pkg.fmri.PkgFmri },
             "added_users": { str: pkg.fmri.PkgFmri },
@@ -130,6 +129,7 @@
             "children_nop": [ li.LinkedImageName ],
             "children_planned": [ li.LinkedImageName ],
             "install_actions": [ _ActionPlan ],
+            "li_pfacets": pkg.facet.Facets,
             "li_ppkgs": frozenset([ pkg.fmri.PkgFmri ]),
             "li_props": { li.PROP_NAME: li.LinkedImageName },
             "pkg_plans": [ pkg.client.pkgplan.PkgPlan ],
@@ -157,9 +157,10 @@
                 self._cfg_mediators = {}
                 self._varcets_change = False
                 self._new_variants = None
-                self._changed_facets = pkg.facet.Facets()
-                self._removed_facets = set()
+                self._old_facets = None
                 self._new_facets = None
+                self._facet_change = False
+                self._masked_facet_change = False
                 self._new_mediators = collections.defaultdict(set)
                 self._mediators_change = False
                 self._new_avoid_obs = (set(), set())
@@ -172,6 +173,7 @@
                 self.li_ppkgs = frozenset()
                 self.li_ppubs = None
                 self.li_props = {}
+                self._li_pkg_updates = True
 
                 #
                 # Properties set when state >= EVALUATED_OK
@@ -198,6 +200,7 @@
                 self._need_boot_archive = None
                 # child properties
                 self.child_op = None
+                self.child_op_implicit = False
                 self.child_kwargs = {}
                 self.children_ignored = None
                 self.children_planned = []
@@ -467,27 +470,88 @@
 
         @property
         def varcets(self):
-                """Returns a tuple of two lists containing the facet and variant
-                changes in this plan."""
+                """Returns a tuple of two lists containing the facet and
+                variant changes in this plan.
+
+                The variant list contains tuples with the following format:
+
+                    (<variant>, <new-value>)
+
+                The facet list contains tuples with the following format:
+
+                    (<facet>, <new-value>, <old-value>, <source>,
+                        <old-masked>, <new-masked>)
+
+                """
+
                 vs = []
                 if self._new_variants:
                         vs = self._new_variants.items()
+
+                # sort results by variant name
+                vs.sort(key=lambda x: x[0])
+
                 fs = []
-                fs.extend(self._changed_facets.items())
-                fs.extend([(f, None) for f in self._removed_facets])
+                if self._new_facets is None:
+                        return (vs, fs)
+
+                # create new dictionaries that index facets by name and
+                # source:
+                #    dict[(<facet, src>)] = (<value>, <masked>)
+                old_facets = dict([
+                    ((f, src), (v, masked))
+                    for f in self._old_facets
+                    # W0212 Access to a protected member
+                    # pylint: disable=W0212
+                    for v, src, masked in self._old_facets._src_values(f)
+                ])
+                new_facets = dict([
+                    ((f, src), (v, masked))
+                    for f in self._new_facets
+                    # W0212 Access to a protected member
+                    # pylint: disable=W0212
+                    for v, src, masked in self._new_facets._src_values(f)
+                ])
+
+                # check for removed facets
+                for f, src in set(old_facets) - set(new_facets):
+                        v, masked = old_facets[f, src]
+                        fs.append((f, None, v, src, masked, False))
+
+                # check for added facets
+                for f, src in set(new_facets) - set(old_facets):
+                        v, masked = new_facets[f, src]
+                        fs.append((f, v, None, src, False, masked))
+
+                # check for changing facets
+                for f, src in set(old_facets) & set(new_facets):
+                        if old_facets[f, src] == new_facets[f, src]:
+                                continue
+                        v_old, m_old = old_facets[f, src]
+                        v_new, m_new = new_facets[f, src]
+                        fs.append((f, v_new, v_old, src, m_old, m_new))
+
+                # sort results by facet name
+                fs.sort(key=lambda x: x[0])
+
                 return (vs, fs)
 
         def get_varcets(self):
                 """Returns a formatted list of strings representing the
                 variant/facet changes in this plan"""
                 vs, fs = self.varcets
-                return list(itertools.chain((
+                rv = [
                     "variant %s: %s" % (name[8:], val)
                     for (name, val) in vs
-                ), (
-                    "  facet %s: %s" % (name[6:], val)
-                    for (name, val) in fs
-                )))
+                ]
+                masked_str = _(" (masked)")
+                for name, v_new, v_old, src, m_old, m_new in fs:
+                        m_old = m_old and masked_str or ""
+                        m_new = m_new and masked_str or ""
+                        msg = "  facet %s (%s): %s%s -> %s%s" % \
+                            (name[6:], src, v_old, m_old, v_new, m_new)
+                        rv.append(msg)
+                return rv
 
         def get_changes(self):
                 """A generation function that yields tuples of PackageInfo
@@ -680,3 +744,11 @@
         def cbytes_avail(self):
                 """Estimated number of bytes available in download cache"""
                 return self._cbytes_avail
+
+        @property
+        def new_facets(self):
+                """If facets are changing, this is the new set of facets being
+                applied."""
+                if self._new_facets is None:
+                        return None
+                return pkg.facet.Facets(self._new_facets)
--- a/src/modules/facet.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/modules/facet.py	Wed Aug 07 15:30:45 2013 -0700
@@ -21,13 +21,13 @@
 #
 
 #
-# Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 # basic facet support
 
 from pkg._varcet import _allow_facet
-from pkg.misc import EmptyI
+from pkg.misc import EmptyI, ImmutableDict
 import fnmatch
 import re
 import types
@@ -37,92 +37,378 @@
         # and maintain ordered list of keys sorted
         # by length.
 
-        # subclass __getitem_ so that queries w/
+        # subclass __getitem__ so that queries w/
         # actual facets find match
 
+        #
+        # Facets can come from three different sources.
+        #
+        # SYSTEM facets are facets whose values are assigned by the system.
+        # These are usually facets defined in packages which are not set in an
+        # image, and the value assigned by the system is always true.  These
+        # facets will usually never be found in a Facets dictionary.  (Facets
+        # dictionaries only contain facets which are explicitly set.)
+        #
+        # LOCAL facets are facets which have been set locally in an image
+        # using pkg(1) or the pkg api.  Explicitly set LOCAL facets are stored
+        # in Facets.__local.  Facets which are not explicitly set but match an
+        # explicitly set LOCAL facet glob pattern are also considered to be
+        # LOCAL.
+        #
+        # PARENT facets are facets which are inherited from a parent image.
+        # They are managed internally by the packaging subsystem.  Explicitly
+        # inherited facets are stored in Facets.__inherited.  Facets which are
+        # not explicitly set but match an explicitly set PARENT facet glob
+        # pattern are also considered to be PARENT.
+        #
+        # When evaluating facets, all PARENT facets are evaluated before LOCAL
+        # facets.  This is done by ensuring that all PARENT facets come before
+        # any LOCAL facets in __keylist.  This is done because PARENT facets
+        # exist to propagate faceted dependencies between linked images, which
+        # is needed to ensure the solver can run successfully.  ie, if a
+        # parent image relaxes dependencies via facet version-locks, then the
+        # child needs to inherit those facets since otherwise it is more
+        # constrained in possible solutions than its parent and likely won't
+        # be able to plan an update that keeps it in sync with its parent.
+        #
+        # Since PARENT facets take priority over LOCAL facets, it's possible to
+        # have conflicts between the two.  In the case where a facet is both
+        # inherited and set locally, both values are preserved, but the
+        # inherited value masks the local value.  Users can list and update
+        # local values while they are masked using pkg(1), but as long as the
+        # values are masked they will not affect image planning operations.
+        # Once an inherited facet that masks a local facet is removed, the
+        # local facet will be restored.
+        #
+
+        FACET_SRC_SYSTEM = "system"
+        FACET_SRC_LOCAL = "local"
+        FACET_SRC_PARENT = "parent"
+
         def __init__(self, init=EmptyI):
                 dict.__init__(self)
                 self.__keylist = []
                 self.__res = {}
-                for i in init:
-                        self[i] = init[i]
+                self.__local = {}
+                self.__local_ro = None
+                self.__inherited = {}
+                self.__inherited_ro = None
+
+                # initialize ourselves
+                self.update(init)
 
         @staticmethod
         def getstate(obj, je_state=None):
                 """Returns the serialized state of this object in a format
                 that that can be easily stored using JSON, pickle, etc."""
-                return dict(obj)
+
+                return [
+                        [k, v, True]
+                        for k, v in obj.__inherited.iteritems()
+                ] + [
+                        [k, v, False]
+                        for k, v in obj.__local.iteritems()
+                ]
 
         @staticmethod
         def fromstate(state, jd_state=None):
                 """Update the state of this object using previously serialized
                 state obtained via getstate()."""
-                return Facets(init=state)
+
+                rv = Facets()
+                for k, v, inherited in state:
+                        if not inherited:
+                                rv[k] = v
+                        else:
+                                rv._set_inherited(k, v)
+                return rv
+
+        def _cmp_priority(self, other):
+                """Compare the facet match priority of two Facets objects.
+                Since the match priority of a Facets object is dependent upon
+                facet sources (local vs parent) and names, we're essentially
+                ensuring that both objects have the same set of facet sources
+                and names."""
+
+                assert type(other) is Facets
+                return cmp(self.__keylist, other.__keylist)
+
+        def _cmp_values(self, other):
+                """Compare the facet values of two Facets objects.  This
+                comparison ignores any masked values."""
+
+                assert type(other) is Facets
+                return dict.__cmp__(self, other)
+
+        def _cmp_all_values(self, other):
+                """Compare all the facet values of two Facets objects.  This
+                comparison takes masked values into account."""
+
+                assert type(other) is Facets
+                rv = cmp(self.__inherited, other.__inherited)
+                if rv == 0:
+                        rv = cmp(self.__local, other.__local)
+                return rv
+
+        def __cmp__(self, other):
+                """Compare two Facets objects.  This comparison takes masked
+                values into account."""
+
+                # check if we're getting compared against something other than
+                # another Facets object.
+                if type(other) is not Facets:
+                        return 1
+
+                # Check for effective facet value changes that could affect
+                # solver computations.
+                rv = self._cmp_values(other)
+                if rv != 0:
+                        return rv
+
+                # Check for facet priority changes that could affect solver
+                # computations.  (Priority changes can occur when local or
+                # inherited facets are added or removed.)
+                rv = self._cmp_priority(other)
+                if rv != 0:
+                        return rv
+
+                # There are no outwardly visible facet priority or value
+                # changes that could affect solver computations, but it's
+                # still possible that we're changing the set of local or
+                # inherited facets in a way that doesn't affect solver
+                # computations.  For example:  we could be adding a local
+                # facet with a value that is masked by an inherited facet, or
+                # having a facet transition from being inherited to being
+                # local without a priority or value change.  Check if this is
+                # the case.
+                rv = self._cmp_all_values(other)
+                return rv
+
+        def __eq__(self, other):
+                """redefine in terms of __cmp__()"""
+                return (Facets.__cmp__(self, other) == 0)
+
+        def __ne__(self, other):
+                """redefine in terms of __cmp__()"""
+                return (Facets.__cmp__(self, other) != 0)
+
+        def __ge__(self, other):
+                """redefine in terms of __cmp__()"""
+                return (Facets.__cmp__(self, other) >= 0)
+
+        def __gt__(self, other):
+                """redefine in terms of __cmp__()"""
+                return (Facets.__cmp__(self, other) > 0)
+
+        def __le__(self, other):
+                """redefine in terms of __cmp__()"""
+                return (Facets.__cmp__(self, other) <= 0)
+
+        def __lt__(self, other):
+                """redefine in terms of __cmp__()"""
+                return (Facets.__cmp__(self, other) < 0)
 
         def __repr__(self):
                 s =  "<"
-                s += ", ".join(["%s:%s" % (k, dict.__getitem__(self, k)) for k in self.__keylist])
+                s += ", ".join([
+                    "%s:%s" % (k, dict.__getitem__(self, k))
+                    for k in self.__keylist
+                ])
                 s += ">"
 
                 return s
 
-        def __setitem__(self, item, value):
+        def __keylist_sort(self):
+                """Update __keysort, which is used to determine facet matching
+                order.  Inherited facets always take priority over local
+                facets so make sure all inherited facets come before local
+                facets in __keylist."""
+
+                def facet_sort(x, y):
+                        return len(y) - len(x)
+
+                self.__keylist = []
+                self.__keylist += sorted([
+                        i
+                        for i in self
+                        if i in self.__inherited
+                ], cmp=facet_sort)
+                self.__keylist += sorted([
+                        i
+                        for i in self
+                        if i not in self.__inherited
+                ], cmp=facet_sort)
+
+        def __setitem_internal(self, item, value, inherited=False):
                 if not item.startswith("facet."):
                         raise KeyError, 'key must start with "facet".'
 
                 if not (value == True or value == False):
                         raise ValueError, "value must be boolean"
 
-                if item not in self:
-                        self.__keylist.append(item)
-                        self.__keylist.sort(cmp=lambda x, y: len(y) - len(x))
-                dict.__setitem__(self, item, value)
-                self.__res[item] = re.compile(fnmatch.translate(item))
+                keylist_sort = False
+                if (inherited and item not in self.__inherited) or \
+                    (not inherited and item not in self):
+                        keylist_sort = True
+
+                # save the facet in the local or inherited dictionary
+                # clear the corresponding read-only dictionary
+                if inherited:
+                        self.__inherited[item] = value
+                        self.__inherited_ro = None
+                else:
+                        self.__local[item] = value
+                        self.__local_ro = None
+
+                # Inherited facets always take priority over local facets.
+                if inherited or item not in self.__inherited:
+                        dict.__setitem__(self, item, value)
+                        self.__res[item] = re.compile(fnmatch.translate(item))
 
+                if keylist_sort:
+                        self.__keylist_sort()
 
-        def __getitem__(self, item):
-                """implement facet lookup algorithm here"""
-                # Note that _allow_facet bypasses __getitem__ for performance
-                # reasons; if __getitem__ changes, _allow_facet in _varcet.c
-                # must also be updated.
+        def __setitem__(self, item, value):
+                """__setitem__ only operates on local facets."""
+                self.__setitem_internal(item, value)
+
+        def __getitem_internal(self, item):
+                """Implement facet lookup algorithm here
+
+                Note that _allow_facet bypasses __getitem__ for performance
+                reasons; if __getitem__ changes, _allow_facet in _varcet.c
+                must also be updated.
+
+                We return a tuple of the form (<key>, <value>) where key is
+                the explicitly set facet name (which may be a glob pattern)
+                that matched the caller specific facet name."""
+
                 if not item.startswith("facet."):
                         raise KeyError, "key must start w/ facet."
 
                 if item in self:
-                        return dict.__getitem__(self, item)
+                        return item, dict.__getitem__(self, item)
                 for k in self.__keylist:
                         if self.__res[k].match(item):
-                                return dict.__getitem__(self, k)
+                                return k, dict.__getitem__(self, k)
+
+                return None, True # be inclusive
+
+        def __getitem__(self, item):
+                return self.__getitem_internal(item)[1]
+
+        def __delitem_internal(self, item, inherited=False):
+
+                # check for an attempt to delete an invalid facet
+                if not dict.__contains__(self, item):
+                        raise KeyError, item
+
+                # check for an attempt to delete an invalid local facet
+                if not inherited and item not in self.__local:
+                        raise KeyError, item
+
+                # we should never try to delete an invalid inherited facet
+                assert not inherited or item in self.inherited
 
-                return True # be inclusive
+                keylist_sort = False
+                if inherited and item in self.__local:
+                        # the inherited value was overriding a local value
+                        # that should now be exposed
+                        dict.__setitem__(self, item, self.__local[item])
+                        self.__res[item] = re.compile(fnmatch.translate(item))
+                        keylist_sort = True
+                else:
+                        # delete the item
+                        dict.__delitem__(self, item)
+                        del self.__res[item]
+                        self.__keylist.remove(item)
+
+                # delete item from the local or inherited dictionary
+                # clear the corresponding read-only dictionary
+                if inherited:
+                        rv = self.__inherited[item]
+                        del self.__inherited[item]
+                        self.__inherited_ro = None
+                else:
+                        rv = self.__local[item]
+                        del self.__local[item]
+                        self.__local_ro = None
+
+                if keylist_sort:
+                        self.__keylist_sort()
+                return rv
 
         def __delitem__(self, item):
-                dict.__delitem__(self, item)
-                self.__keylist.remove(item)
-                del self.__res[item]
+                """__delitem__ only operates on local facets."""
+                self.__delitem_internal(item)
 
         # allow_action is provided as a native function (see end of class
         # declaration).
 
+        def _set_inherited(self, item, value):
+                """Set an inherited facet."""
+                self.__setitem_internal(item, value, inherited=True)
+
+        def _clear_inherited(self):
+                """Clear all inherited facet."""
+                for k in self.__inherited.keys():
+                        self.__delitem_internal(k, inherited=True)
+
+        def _action_match(self, act):
+                """Find the subset of facet key/values pairs which match any
+                facets present on an action."""
+
+                # find all the facets present in the current action
+                action_facets = frozenset([
+                        a
+                        for a in act.attrs
+                        if a.startswith("facet.")
+                ])
+
+                rv = set()
+                for facet in self.__keylist:
+                        if facet in action_facets:
+                                # we found a matching facet.
+                                rv.add((facet, self[facet]))
+                                continue
+                        for action_facet in action_facets:
+                                if self.__res[facet].match(action_facet):
+                                        # we found a matching facet.
+                                        rv.add((facet, self[facet]))
+                                        break
+
+                return (frozenset(rv))
+
         def pop(self, item, *args, **kwargs):
+                """pop() only operates on local facets."""
+
                 assert len(args) == 0 or (len(args) == 1 and
                     "default" not in kwargs)
-                try:
-                        self.__keylist.remove(item)
-                        del self.__res[item]
-                except ValueError:
-                        if not args and "default" not in kwargs:
-                                raise
-                default = kwargs.get("default", None)
-                if args:
-                        default = args[0]
-                return dict.pop(self, item, default)
+
+                if item not in self.__local:
+                        # check if the user specified a default value
+                        if args:
+                                return args[0]
+                        elif "default" in kwargs:
+                                return kwargs["default"]
+                        if len(self) == 0:
+                                raise KeyError, 'pop(): dictionary is empty'
+                        raise KeyError, item
+
+                return self.__delitem_internal(item, inherited=False)
 
         def popitem(self):
-                popped = dict.popitem(self)
-                self.__keylist.remove(popped[0])
-                del self.__res[popped]
-                return popped
+                """popitem() only operates on local facets."""
+
+                item = None
+                for item, value in self.__local.iteritems():
+                        break
+
+                if item is None:
+                        raise KeyError, 'popitem(): dictionary is empty'
+
+                self.__delitem_internal(item)
+                return (item, value)
 
         def setdefault(self, item, default=None):
                 if item not in self:
@@ -130,8 +416,16 @@
                 return self[item]
 
         def update(self, d):
-                for k, v in d.iteritems():
-                        self[k] = v
+                if type(d) == Facets:
+                        # preserve inherited facets.
+                        for k, v in d.__inherited.iteritems():
+                                self._set_inherited(k, v)
+                        for k, v in d.__local.iteritems():
+                                self[k] = v
+                        return
+
+                for k in d:
+                        self[k] = d[k]
 
         def keys(self):
                 return self.__keylist[:]
@@ -139,6 +433,29 @@
         def values(self):
                 return [self[k] for k in self.__keylist]
 
+        def _src_values(self, name):
+                """A facet may be set via multiple sources and hence have
+                multiple values.  If there are multiple values for a facet,
+                all but one of those values will be masked.  So for a given
+                facet, return a list of tuples of the form (<value>, <src>,
+                <masked>) which represent all currently set values for this
+                facet."""
+
+                rv = []
+                if name in self.__inherited:
+                        src = self.FACET_SRC_PARENT
+                        value = self.__inherited[name]
+                        masked = False
+                        rv.append((value, src, masked))
+                if name in self.__local:
+                        src = self.FACET_SRC_LOCAL
+                        value = self.__local[name]
+                        masked = False
+                        if name in self.__inherited:
+                                masked = True
+                        rv.append((value, src, masked))
+                return rv
+
         def items(self):
                 return [a for a in self.iteritems()]
 
@@ -151,7 +468,38 @@
 
         def clear(self):
                 self.__keylist = []
-                self.__res = []
+                self.__res = {}
+                self.__local = {}
+                self.__local_ro = None
+                self.__inherited = {}
+                self.__inherited_ro = None
                 dict.clear(self)
 
+        def _match_src(self, name):
+                """Report the source of a facet value if we were to attempt to
+                look it up in the current Facets object dictionary."""
+
+                k = self.__getitem_internal(name)[0]
+                if k in self.__inherited:
+                        return self.FACET_SRC_PARENT
+                if k in self.__local:
+                        return self.FACET_SRC_LOCAL
+                assert k is None and k not in self
+                return self.FACET_SRC_SYSTEM
+
+        # For convenience, provide callers with direct access to local and
+        # parent facets via cached read-only dictionaries.
+        @property
+        def local(self):
+                if self.__local_ro is None:
+                        self.__local_ro = ImmutableDict(self.__local)
+                return self.__local_ro
+
+        @property
+        def inherited(self):
+                if self.__inherited_ro is None:
+                        self.__inherited_ro = ImmutableDict(self.__inherited)
+                return self.__inherited_ro
+
+
 Facets.allow_action = types.MethodType(_allow_facet, None, Facets)
--- a/src/po/POTFILES.in	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/po/POTFILES.in	Wed Aug 07 15:30:45 2013 -0700
@@ -38,6 +38,7 @@
 modules/client/linkedimage/__init__.py
 modules/client/options.py
 modules/client/pkg_solver.py
+modules/client/plandesc.py
 modules/client/progress.py
 modules/client/publisher.py
 modules/client/transport/repo.py
--- a/src/tests/api/t_linked_image.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/tests/api/t_linked_image.py	Wed Aug 07 15:30:45 2013 -0700
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2011, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import testutils
@@ -796,7 +796,7 @@
                 api_objs[1].gen_plan_change_varcets(
                     variants={"variant.foo": "baz"})
 
-                # install a synced package into 1
+                # install a synced package into 0
                 self._api_install(api_objs[0], [self.p_sync1_name[2]],
                     li_ignore=[])
 
@@ -978,10 +978,12 @@
                         [self.p_sync1_name_gen])
 
                 # test operations on child nodes
-                rvdict = {1: EXIT_NOP, 2: EXIT_OOPS, 3: EXIT_OOPS,
+                rvdict = {1: EXIT_OK, 2: EXIT_OOPS, 3: EXIT_OOPS,
                     4: EXIT_OOPS}
                 self._children_op(0, [], "sync_linked_children",
                     rvdict=rvdict)
+                rvdict = {1: EXIT_NOP, 2: EXIT_OOPS, 3: EXIT_OOPS,
+                    4: EXIT_OOPS}
                 self._children_op(0, [1, 2, 3, 4], "sync_linked_children",
                     rvdict=rvdict)
 
--- a/src/tests/cli/t_change_facet.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/tests/cli/t_change_facet.py	Wed Aug 07 15:30:45 2013 -0700
@@ -146,7 +146,8 @@
                 self.pkg("change-facet -n --parsable=0 wombat=false")
                 self.assertEqualParsable(self.output,
                     affect_packages=[],
-                    change_facets=[["facet.wombat", False]])
+                    change_facets=[["facet.wombat", False, None, 'local',
+                       False, False]])
 
                 # Again, but this time after removing the publisher cache data
                 # and as an unprivileged user to verify that cached manifest
@@ -158,7 +159,8 @@
                     "wombat=false", su_wrap=True)
                 self.assertEqualParsable(self.output,
                     affect_packages=[],
-                    change_facets=[["facet.wombat", False]])
+                    change_facets=[["facet.wombat", False, None, 'local',
+                        False, False]])
 
                 # Again, but this time after removing the cache directory
                 # entirely.
@@ -169,14 +171,16 @@
                     "wombat=false", su_wrap=True)
                 self.assertEqualParsable(self.output,
                     affect_packages=[],
-                    change_facets=[["facet.wombat", False]])
+                    change_facets=[["facet.wombat", False, None, 'local',
+                        False, False]])
 
                 # change to pick up another file w/ two tags and test the
                 # parsable output
                 self.pkg("change-facet --parsable=0 facet.locale.nl_ZA=True")
                 self.assertEqualParsable(self.output,
                     affect_packages=alist,
-                    change_facets=[["facet.locale.nl_ZA", True]])
+                    change_facets=[["facet.locale.nl_ZA", True, None, 'local',
+                        False, False]])
                 self.pkg("verify")
                 self.pkg("facet")
 
@@ -195,9 +199,11 @@
                 self.assertEqualParsable(self.output,
                     affect_packages=alist,
                     change_facets=[
-                        ["facet.locale*", None],
-                        ["facet.locale.fr*", None],
-                        ["facet.locale.fr_CA", None]
+                        ["facet.locale*", None, False, 'local', False, False],
+                        ["facet.locale.fr*", None, True, 'local', False,
+                            False],
+                        ["facet.locale.fr_CA", None, False, 'local', False,
+                            False]
                     ])
                 self.pkg("verify")
 
@@ -225,9 +231,10 @@
                 self.assertEqualParsable(self.output,
                     affect_packages=alist,
                     change_facets=[
-                        ["facet.locale*", None],
-                        ["facet.locale.*", False],
-                        ["facet.locale.fr_CA", True]
+                        ["facet.locale*", None, False, 'local', False, False],
+                        ["facet.locale.*", False, None, 'local', False, False],
+                        ["facet.locale.fr_CA", True, None, 'local', False,
+                            False]
                     ])
                 self.assert_file_is_there("4")
 
@@ -238,9 +245,11 @@
                 self.assertEqualParsable(self.output,
                     affect_packages=alist,
                     change_facets=[
-                        ["facet.locale.*", None],
-                        ["facet.locale.fr_*", False],
-                        ["facet.locale.fr_CA", None]
+                        ["facet.locale.*", None, False, 'local', False, False],
+                        ["facet.locale.fr_*", False, None, 'local', False,
+                            False],
+                        ["facet.locale.fr_CA", None, True, 'local', False,
+                            False]
                     ])
                 self.assert_file_is_there("4")
 
@@ -253,10 +262,10 @@
                 # it works.
                 self.pkg("change-facet -v foo=True")
                 self.pkg("facet -H -F tsv")
-                self.assertEqual("facet.foo\tTrue\n", self.output)
+                self.assertEqual("facet.foo\tTrue\tlocal\n", self.output)
                 self.pkg("change-facet --parsable=0 foo=None")
                 self.assertEqualParsable(self.output, change_facets=[
-                    ["facet.foo", None]])
+                    ["facet.foo", None, True, 'local', False, False]])
                 self.pkg("facet -H")
                 self.assertEqual("", self.output)
 
@@ -293,8 +302,8 @@
                 self.pkg("install [email protected]")
                 self.pkg("facet -H -F tsv")
                 expected = (
-                    "facet.locale.fr\tFalse\n"
-                    "facet.locale.fr_FR\tFalse\n")
+                    "facet.locale.fr\tFalse\tlocal\n"
+                    "facet.locale.fr_FR\tFalse\tlocal\n")
                 self.assertEqualDiff(expected, self.output)
                 for i in [ 0, 3, 4, 5, 6, 7 ]:
                         self.assert_file_is_there(str(i))
@@ -307,8 +316,8 @@
                 self.pkg("update")
                 self.pkg("facet -H -F tsv")
                 expected = (
-                    "facet.locale.fr\tFalse\n"
-                    "facet.locale.fr_FR\tFalse\n")
+                    "facet.locale.fr\tFalse\tlocal\n"
+                    "facet.locale.fr_FR\tFalse\tlocal\n")
                 self.assertEqualDiff(expected, self.output)
                 for i in [ 0, 3, 4, 5, 6, 7 ]:
                         self.assert_file_is_there(str(i))
@@ -334,7 +343,7 @@
                 self.pkg("facet -H -F tsv")
                 output = self.reduceSpaces(self.output)
                 expected = (
-                    "facet.locale.fr\tFalse\n")
+                    "facet.locale.fr\tFalse\tlocal\n")
                 self.assertEqualDiff(expected, output)
                 for i in [ 0, 2, 3, 4, 5, 6, 7 ]:
                         self.assert_file_is_there(str(i))
@@ -346,8 +355,8 @@
                 self.pkg("change-facet -v locale.fr_FR=False")
                 self.pkg("facet -H -F tsv")
                 expected = (
-                    "facet.locale.fr\tFalse\n"
-                    "facet.locale.fr_FR\tFalse\n")
+                    "facet.locale.fr\tFalse\tlocal\n"
+                    "facet.locale.fr_FR\tFalse\tlocal\n")
                 self.assertEqualDiff(expected, self.output)
                 for i in [ 0, 3, 4, 5, 6, 7 ]:
                         self.assert_file_is_there(str(i))
@@ -361,8 +370,8 @@
                 self.pkg("facet -H -F tsv")
                 output = self.reduceSpaces(self.output)
                 expected = (
-                    "facet.locale.fr_FR\tFalse\n"
-                    "facet.locale.nl\tFalse\n")
+                    "facet.locale.fr_FR\tFalse\tlocal\n"
+                    "facet.locale.nl\tFalse\tlocal\n")
                 self.assertEqualDiff(expected, output)
                 for i in [ 0, 1, 3, 4, 6, 7 ]:
                         self.assert_file_is_there(str(i))
@@ -376,7 +385,7 @@
                 self.pkg("facet -H -F tsv")
                 output = self.reduceSpaces(self.output)
                 expected = (
-                    "facet.locale.fr_FR\tFalse\n")
+                    "facet.locale.fr_FR\tFalse\tlocal\n")
                 self.assertEqualDiff(expected, output)
                 for i in [ 0, 1, 3, 4, 5, 6, 7 ]:
                         self.assert_file_is_there(str(i))
--- a/src/tests/cli/t_pkg_linked.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/tests/cli/t_pkg_linked.py	Wed Aug 07 15:30:45 2013 -0700
@@ -39,6 +39,7 @@
 
 import pkg.actions
 import pkg.client.image as image
+import pkg.fmri as fmri
 
 from pkg.client.pkgdefs import *
 
@@ -88,8 +89,8 @@
                 p_all.append(p_data)
 
         # generate packages that do need to be synced
-        p_sunc1_name_gen = "sync1"
-        pkgs = [p_sunc1_name_gen + ver for ver in p_vers]
+        p_sync1_name_gen = "sync1"
+        pkgs = [p_sync1_name_gen + ver for ver in p_vers]
         p_sync1_name = dict(zip(range(len(pkgs)), pkgs))
         for i in p_sync1_name:
                 p_data = "open %s\n" % p_sync1_name[i]
@@ -114,7 +115,7 @@
                     add set name=variant.foo value=bar value=baz
                     add file tmp/bar mode=0555 owner=root group=bin path=sync2_bar variant.foo=bar
                     add file tmp/baz mode=0555 owner=root group=bin path=sync2_baz variant.foo=baz
-                     close\n"""
+                    close\n"""
                 p_all.append(p_data)
 
         def setUp(self):
@@ -241,7 +242,8 @@
                 # Ensure 'coverage' is turned off-- it won't work.
                 self.cmdline_run("%s" % args, exit=rv, coverage=False)
 
-        def _pkg(self, il, cmd, args=None, rv=None, rvdict=None):
+        def _pkg(self, il, cmd, args=None, rv=None, rvdict=None,
+            output_cb=None, env_arg=None):
                 assert type(il) == list
                 assert type(cmd) == str
                 assert args == None or type(args) == str
@@ -267,7 +269,9 @@
                 for i in il:
                         rv = rvdict.get(i, EXIT_OK)
                         self.pkg("-R %s %s %s" % (self.i_path[i], cmd, args),
-                            exit=rv)
+                            exit=rv, env_arg=env_arg)
+                        if output_cb:
+                                output_cb(self.output)
 
         def _pkg_child(self, i, cl, cmd, args=None, rv=None, rvdict=None):
                 assert type(i) == int
@@ -346,6 +350,9 @@
                             (args, self.i_name[c], self.i_path[c]),
                             rv=rv)
 
+        def _assertEqual_cb(self, output):
+                return lambda x: self.assertEqual(output, x)
+
 
 class TestPkgLinked1(TestPkgLinked):
         def test_not_linked(self):
@@ -1064,9 +1071,6 @@
                 self._pkg_child_all(0, "audit-linked")
 
 
-class TestPkgLinked2(TestPkgLinked):
-        """Class used solely to split up the test suite for parallelization."""
-
         def test_audit_diverged_1(self):
                 self._imgs_create(4)
 
@@ -1097,6 +1101,9 @@
                 self._pkg_child(0, [1, 2, 3], "audit-linked", rv=rv)
                 self._pkg_child_all(0, "audit-linked", rv=rv)
 
+class TestPkgLinked2(TestPkgLinked):
+        """Class used solely to split up the test suite for parallelization."""
+
         def test_sync_fail(self):
                 self._imgs_create(3)
 
@@ -1233,93 +1240,300 @@
                 self._pkg([1, 2], "list -v %s" % self.p_sync1_name[1])
                 self._pkg([1, 2], "list -v %s" % self.p_foo1_name[2])
 
-        def test_sync_2_via_image_update(self):
-                self._imgs_create(3)
-
-                # install different synced package into each image
-                self._pkg([0], "install -v %s" % self.p_sync1_name[1])
-                self._pkg([1, 2], "install -v %s" % self.p_sync1_name[2])
-
-                # install unsynced packages to make sure they are updated
+        def __test_linked_sync_via_child_op(self, op, op_args, **kwargs):
+                """Verify that if we do a operation "op" on a child image, it
+                automatically brings its packages in sync with its parent.
+
+                We perform the operation on three child images.  1 is a push
+                child, 2 and 3 are pull children.  1 and 2 have their linked
+                image metadata in sync with the parent.  3 has its metadata
+                out of sync with the parent and is expected to sync its own
+                metadata."""
+
+                # create parent (0), push child (1), and pull child (2, 3)
+                self._imgs_create(4)
+                self._attach_child(0, [1])
+                self._attach_parent([2, 3], 0)
+
+                # install synced package into each image
+                self._pkg([0, 1, 2, 3], "install -v %s" % self.p_sync1_name[2])
+
+                # install unsynced packages
                 self._pkg([0], "install -v %s" % self.p_foo1_name[1])
-                self._pkg([1, 2], "install -v %s" % self.p_foo1_name[2])
-
-                # use --linked-md-only so we don't install constraints package
-                self._attach_child(0, [1], args="--linked-md-only")
-                self._attach_parent([2], 0, args="--linked-md-only")
-
-                # plan sync
-                self._pkg([1, 2], "image-update -vn")
-                self._pkg([1, 2], "audit-linked", rv=EXIT_DIVERGED)
-
-                # sync child
-                self._pkg([1, 2], "image-update --parsable=0")
-                self.assertEqualParsable(self.output, change_packages=[
-                    [self.foo1_list[2], self.foo1_list[0]],
-                    [self.s1_list[2], self.s1_list[1]]])
-                self._pkg([1, 2], "audit-linked")
-                self._pkg([1, 2], "image-update -v", rv=EXIT_NOP)
-                self._pkg([1, 2], "sync-linked -v", rv=EXIT_NOP)
-
-                # check unsynced packages
-                self._pkg([1, 2], "list -v %s" % self.p_foo1_name[0])
-
-        def test_sync_2_via_install(self):
+                self._pkg([1, 2, 3], "install -v %s" % self.p_foo1_name[2])
+
+                # update the parent image while ignoring the children
+                # (thereby putting them out of sync)
+                self._pkg([0], "install -I -v %s" % self.p_sync1_name[1])
+
+                # explicitly sync metadata in children 1 and 2
+                self._pkg([0], "sync-linked -a --linked-md-only")
+                self._pkg([2], "sync-linked --linked-md-only")
+
+                # plan op
+                self._pkg([1, 2, 3], "%s -nv %s" % (op, op_args))
+
+                # verify child images are still diverged
+                self._pkg([1, 2, 3], "audit-linked", rv=EXIT_DIVERGED)
+                self._pkg([0], "audit-linked -a", rv=EXIT_DIVERGED)
+
+                # verify child 3 hasn't updated its metadata
+                # (it still thinks it's in sync)
+                self._pkg([3], "audit-linked --no-parent-sync")
+
+                # execute op
+                def output_cb(output):
+                        self.assertEqualParsable(output, **kwargs)
+                self._pkg([1, 2, 3], "%s --parsable=0 %s" % (op, op_args),
+                    output_cb=output_cb)
+
+                # verify sync via audit and sync (which should be a noop)
+                self._pkg([1, 2, 3], "audit-linked")
+                self._pkg([1, 2, 3], "sync-linked -v", rv=EXIT_NOP)
+                self._pkg([0], "audit-linked -a")
+                self._pkg([0], "sync-linked -a", rv=EXIT_NOP)
+
+        def __test_linked_sync_via_parent_op(self, op, op_args,
+            li_md_change=True, **kwargs):
+                """Verify that if we do a operation "op" on a parent image, it
+                recurses into its children and brings them into sync.
+
+                We perform operation on two child images.  both are push
+                children.  1 has its linked image metadata in sync with the
+                parent.  2 has its linked image metadata out of in sync with
+                the parent and that metadata should get updated during the
+                operation.
+
+                Note that if the metadata in a child image is in sync with its
+                parent, a recursive operation that isn't changing that
+                metadata will assume that the child is already in sync and
+                that we don't need to recurse into it.  This optimization
+                occurs regardless of whether the child image is actually in sync
+                with that metadata (a child can be out of sync with its
+                stored metadata if we do a metadata only update)."""
+
+                # create parent (0), push child (1, 2)
                 self._imgs_create(3)
-
-                # install different synced package into each image
-                self._pkg([0], "install -v %s" % self.p_sync1_name[1])
-                self._pkg([1, 2], "install -v %s" % self.p_sync1_name[2])
-
-                # install unsynced packages to make sure they aren't molested
+                self._attach_child(0, [1, 2])
+
+                # install synced package into each image
+                self._pkg([0, 1, 2], "install -v %s" % self.p_sync1_name[2])
+
+                # install unsynced packages
                 self._pkg([0], "install -v %s" % self.p_foo1_name[1])
                 self._pkg([1, 2], "install -v %s" % self.p_foo1_name[2])
 
-                # use --linked-md-only so we don't install constraints package
-                self._attach_child(0, [1], args="--linked-md-only")
-                self._attach_parent([2], 0, args="--linked-md-only")
-
-                # plan sync
-                self._pkg([1, 2], "install -vn %s" % self.p_sync1_name[1])
-                self._pkg([1, 2], "audit-linked", rv=EXIT_DIVERGED)
-
-                # sync child
-                self._pkg([1, 2], "install -v %s" % self.p_sync1_name[1])
-                self._pkg([1, 2], "audit-linked")
-                self._pkg([1, 2], "install -v %s" % self.p_sync1_name[1],
-                    rv=EXIT_NOP)
-                self._pkg([1, 2], "sync-linked -v", rv=EXIT_NOP)
-
-                # check unsynced packages
-                self._pkg([1, 2], "list -v %s" % self.p_foo1_name[2])
-
-        def test_no_sync_2_via_change_variant(self):
-                self._imgs_create(3)
-
-                # install different synced package into each image
-                self._pkg([0], "install -v %s" % self.p_sync1_name[1])
-                self._pkg([1, 2], "install -v %s" % self.p_sync1_name[2])
-
-                # install unsynced packages to make sure they aren't molested
-                self._pkg([0], "install -v %s" % self.p_foo1_name[1])
-                self._pkg([1, 2], "install -v %s" % self.p_foo1_name[2])
-
-                # use --linked-md-only so we don't install constraints package
-                self._attach_child(0, [1], args="--linked-md-only")
-                self._attach_parent([2], 0, args="--linked-md-only")
-
-                # plan sync
-                self._pkg([1, 2], "change-variant -vn variant.foo=baz")
-                self._pkg([1, 2], "audit-linked", rv=EXIT_DIVERGED)
-
-                # sync child
-                self._pkg([1, 2], "change-variant -v variant.foo=baz")
-                self._pkg([1, 2], "audit-linked", rv=EXIT_DIVERGED)
-                self._pkg([1, 2], "change-variant -v variant.foo=baz",
-                    rv=EXIT_NOP)
-
-                # check unsynced packages
-                self._pkg([1, 2], "list -v %s" % self.p_foo1_name[2])
+                # update the parent image while ignoring the children
+                # (thereby putting them out of sync)
+                self._pkg([0], "install -I -v %s" % self.p_sync1_name[1])
+
+                # explicitly sync metadata in child 1
+                self._pkg([0], "sync-linked --linked-md-only -l %s" %
+                    self.i_name[1])
+
+                # plan op
+                self._pkg([0], "%s -nv %s" % (op, op_args))
+
+                # verify child images are still diverged
+                self._pkg([1], "audit-linked", rv=EXIT_DIVERGED)
+                self._pkg([0], "audit-linked -a", rv=EXIT_DIVERGED)
+
+                # verify child 2 hasn't updated its metadata
+                # (it still thinks it's in sync)
+                self._pkg([2], "audit-linked")
+
+                # execute op
+                def output_cb(output):
+                        self.assertEqualParsable(output, **kwargs)
+                self._pkg([0], "%s --parsable=0 %s" % (op, op_args),
+                    output_cb=output_cb)
+
+                # verify sync via audit and sync (which should be a noop)
+                # if the linked image metadata was changed during this
+                # operation we should have updated both children.  if linked
+                # image metadata was not changed, we'll only have updated one
+                # child.
+                if li_md_change:
+                        synced_children=[1, 2]
+                else:
+                        synced_children=[2]
+                for i in synced_children:
+                        self._pkg([i], "audit-linked")
+                        self._pkg([i], "sync-linked", rv=EXIT_NOP)
+                        self._pkg([0], "audit-linked -l %s" % self.i_name[i])
+                        self._pkg([0], "sync-linked -l %s" % self.i_name[i],
+                            rv=EXIT_NOP)
+
+        def test_linked_sync_via_update(self):
+                """Verify that if we update child images to be in sync with
+                their constraints when we do an update."""
+
+                self.__test_linked_sync_via_child_op(
+                    "update", "",
+                    change_packages=[
+                        [self.foo1_list[2], self.foo1_list[0]],
+                        [self.s1_list[2], self.s1_list[1]]])
+
+                self.__test_linked_sync_via_parent_op(
+                    "update", "",
+                    change_packages=[
+                        [self.foo1_list[1], self.foo1_list[0]],
+                        [self.s1_list[1], self.s1_list[0]]],
+                    child_images=[{
+                        "image_name": "system:img1",
+                        "change_packages": [
+                            [self.foo1_list[2], self.foo1_list[0]],
+                            [self.s1_list[2], self.s1_list[0]]],
+                        },{
+                        "image_name": "system:img2",
+                        "change_packages": [
+                            [self.foo1_list[2], self.foo1_list[0]],
+                            [self.s1_list[2], self.s1_list[0]]],
+                    }])
+
+        def test_linked_sync_via_update_pkg(self):
+                """Verify that if we update child images to be in sync with
+                their constraints when we do an update of a specific
+                package."""
+
+                self.__test_linked_sync_via_child_op(
+                    "update", self.p_foo1_name[3],
+                    change_packages=[
+                        [self.foo1_list[2], self.foo1_list[3]],
+                        [self.s1_list[2], self.s1_list[1]]])
+
+                self.__test_linked_sync_via_parent_op(
+                    "update", self.p_foo1_name[3],
+                    change_packages=[
+                        [self.foo1_list[1], self.foo1_list[3]]],
+                    child_images=[{
+                        "image_name": "system:img1",
+                        "change_packages": [
+                            [self.s1_list[2], self.s1_list[1]]],
+                        },{
+                        "image_name": "system:img2",
+                        "change_packages": [
+                            [self.s1_list[2], self.s1_list[1]]],
+                    }])
+
+        def test_linked_sync_via_install(self):
+                """Verify that if we update child images to be in sync with
+                their constraints when we do an install."""
+
+                self.__test_linked_sync_via_child_op(
+                    "install", self.p_foo1_name[1],
+                    change_packages=[
+                        [self.foo1_list[2], self.foo1_list[1]],
+                        [self.s1_list[2], self.s1_list[1]]])
+
+                self.__test_linked_sync_via_parent_op(
+                    "install", self.p_foo1_name[0],
+                    change_packages=[
+                        [self.foo1_list[1], self.foo1_list[0]],
+                    ],
+                    child_images=[{
+                        "image_name": "system:img1",
+                        "change_packages": [
+                            [self.s1_list[2], self.s1_list[1]]],
+                        },{
+                        "image_name": "system:img2",
+                        "change_packages": [
+                            [self.s1_list[2], self.s1_list[1]]],
+                    }])
+
+        def test_linked_sync_via_sync(self):
+                """Verify that if we update child images to be in sync with
+                their constraints when we do a sync-linked."""
+
+                self.__test_linked_sync_via_child_op(
+                    "sync-linked", "",
+                    change_packages=[
+                        [self.s1_list[2], self.s1_list[1]]])
+
+                self.__test_linked_sync_via_parent_op(
+                    "sync-linked", "-a",
+                    child_images=[{
+                        "image_name": "system:img1",
+                        "change_packages": [
+                            [self.s1_list[2], self.s1_list[1]]],
+                        },{
+                        "image_name": "system:img2",
+                        "change_packages": [
+                            [self.s1_list[2], self.s1_list[1]]],
+                    }])
+
+        def test_linked_sync_via_change_variant(self):
+                """Verify that if we update child images to be in sync with
+                their constraints when we do a change-variant."""
+
+                self.__test_linked_sync_via_child_op(
+                    "change-variant", "variant.foo=baz",
+                    change_packages=[
+                        [self.s1_list[2], self.s1_list[1]]],
+                    affect_packages=[
+                        self.foo1_list[2]],
+                    change_variants=[
+                        ['variant.foo', 'baz']])
+
+                self.__test_linked_sync_via_parent_op(
+                    "change-variant", "variant.foo=baz",
+                    li_md_change=False,
+                    affect_packages=[
+                        self.foo1_list[1], self.s1_list[1]],
+                    change_variants=[
+                        ['variant.foo', 'baz']],
+                    child_images=[{
+                        "image_name": "system:img2",
+                        "change_packages": [
+                            [self.s1_list[2], self.s1_list[1]]],
+                    }])
+
+        def test_linked_sync_via_change_facet(self):
+                """Verify that if we update child images to be in sync with
+                their constraints when we do a change-facet."""
+
+                self.__test_linked_sync_via_child_op(
+                    "change-facet", "facet.foo=True",
+                    change_packages=[
+                        [self.s1_list[2], self.s1_list[1]]],
+                    change_facets=[
+                        ['facet.foo', True, None, 'local', False, False]])
+
+                self.__test_linked_sync_via_parent_op(
+                    "change-facet", "facet.foo=True",
+                    li_md_change=False,
+                    change_facets=[
+                        ['facet.foo', True, None, 'local', False, False]],
+                    child_images=[{
+                        "image_name": "system:img2",
+                        "change_packages": [
+                            [self.s1_list[2], self.s1_list[1]]],
+                    }])
+
+        def test_linked_sync_via_uninstall(self):
+                """Verify that if we update child images to be in sync with
+                their constraints when we do an uninstall."""
+
+                self.__test_linked_sync_via_child_op(
+                    "uninstall", self.p_foo1_name[2],
+                    change_packages=[
+                        [self.s1_list[2], self.s1_list[1]]],
+                    remove_packages=[
+                        self.foo1_list[2]])
+
+                self.__test_linked_sync_via_parent_op(
+                    "uninstall", self.foo1_list[1],
+                    remove_packages=[
+                        self.foo1_list[1]],
+                    child_images=[{
+                        "image_name": "system:img1",
+                        "change_packages": [
+                            [self.s1_list[2], self.s1_list[1]]],
+                        },{
+                        "image_name": "system:img2",
+                        "change_packages": [
+                            [self.s1_list[2], self.s1_list[1]]],
+                    }])
 
 
 class TestPkgLinked3(TestPkgLinked):
@@ -1338,12 +1552,15 @@
 
                 # there should be no updates with --no-parent-sync
                 self._pkg([1], "sync-linked -v --no-parent-sync", rv=EXIT_NOP)
-                self._pkg([1], "image-update -v --no-parent-sync", rv=EXIT_NOP)
-                self._pkg([1], "install -v --no-parent-sync %s" % \
-                    self.p_sync1_name[1], rv=EXIT_NOP)
                 self._pkg([1], "change-variant -v --no-parent-sync "
                     "variant.foo=bar", rv=EXIT_NOP)
-                # TODO: test set-property-linked
+                self._pkg([1], "change-facet -v --no-parent-sync "
+                    "facet.foo=False")
+                self._pkg([1], "install -v --no-parent-sync %s" % \
+                    self.p_foo1_name[1])
+                self._pkg([1], "update -v --no-parent-sync")
+                self._pkg([1], "uninstall -v --no-parent-sync %s" % \
+                    self.p_foo1_name[0])
 
                 # an audit without a parent sync should thingk we're in sync
                 self._pkg([1], "audit-linked --no-parent-sync")
@@ -1355,75 +1572,6 @@
                 # should still be out of sync.
                 self._pkg([1], "audit-linked", rv=EXIT_DIVERGED)
 
-        def test_parent_sync_2_via_sync(self):
-                self._imgs_create(2)
-
-                # install synced package into each image
-                self._pkg([0, 1], "install -v %s" % self.p_sync1_name[1])
-
-                self._attach_parent([1], 0)
-
-                # update parent image
-                self._pkg([0], "install -v %s" % self.p_sync1_name[0])
-
-                # verify that pkg operations sync parent metadata
-                self._pkg([1], "sync-linked -v -n")
-                self._pkg([1], "sync-linked -v")
-                self._pkg([1], "sync-linked -v", rv=EXIT_NOP)
-                self._pkg([1], "audit-linked")
-
-        def test_parent_sync_2_via_image_update(self):
-                self._imgs_create(2)
-
-                # install synced package into each image
-                self._pkg([0, 1], "install -v %s" % self.p_sync1_name[1])
-
-                self._attach_parent([1], 0)
-
-                # update parent image
-                self._pkg([0], "install -v %s" % self.p_sync1_name[0])
-
-                # verify that pkg operations sync parent metadata
-                self._pkg([1], "image-update -v -n")
-                self._pkg([1], "image-update -v")
-                self._pkg([1], "image-update -v", rv=EXIT_NOP)
-                self._pkg([1], "audit-linked")
-
-        def test_parent_sync_2_via_install(self):
-                self._imgs_create(2)
-
-                # install synced package into each image
-                self._pkg([0, 1], "install -v %s" % self.p_sync1_name[1])
-
-                self._attach_parent([1], 0)
-
-                # update parent image
-                self._pkg([0], "install -v %s" % self.p_sync1_name[0])
-
-                # verify that pkg operations sync parent metadata
-                self._pkg([1], "install -v -n %s" % self.p_sync1_name[0])
-                self._pkg([1], "install -v %s" % self.p_sync1_name[0])
-                self._pkg([1], "install -v %s" % self.p_sync1_name[0],
-                    rv=EXIT_NOP)
-                self._pkg([1], "audit-linked")
-
-        def test_parent_no_sync_2_via_change_variant(self):
-                self._imgs_create(2)
-
-                # install synced package into each image
-                self._pkg([0, 1], "install -v %s" % self.p_sync1_name[1])
-
-                self._attach_parent([1], 0)
-
-                # update parent image
-                self._pkg([0], "install -v %s" % self.p_sync1_name[0])
-
-                # verify that pkg operations sync parent metadata
-                self._pkg([1], "change-variant -v -n variant.foo=baz")
-                self._pkg([1], "change-variant -v variant.foo=baz")
-                self._pkg([1], "change-variant -v variant.foo=baz", rv=EXIT_NOP)
-                self._pkg([1], "audit-linked", rv=EXIT_DIVERGED)
-
         def test_install_constrainted(self):
                 self._imgs_create(3)
 
@@ -1450,75 +1598,6 @@
                 # install the same ver of a synced package in the child
                 self._pkg([1, 2], "install -v %s" % self.p_sync1_name[1])
 
-        def test_p2c_recurse_1_image_update(self):
-                self._imgs_create(3)
-
-                # install different synced package into each image
-                for i in [0, 1]:
-                        self._pkg([i], "install -v %s" % self.p_sync1_name[1])
-                for i in [2]:
-                        self._pkg([i], "install -v %s" % self.p_sync1_name[2])
-
-                # attach --linked-md-only doesn't install constraints package
-                self._attach_child(0, [1])
-                self._attach_child(0, [2], args="--linked-md-only")
-
-                self._pkg([0], "image-update -v -n")
-                self._pkg([0], "image-update --parsable=0")
-                self.assertEqualParsable(self.output,
-                    change_packages=[[self.s1_list[1], self.s1_list[0]]],
-                    child_images=[{
-                        "image_name": "system:img1",
-                        "change_packages": [[self.s1_list[1], self.s1_list[0]]],
-                    },
-                    {
-                        "image_name": "system:img2",
-                        "change_packages": [[self.s1_list[2], self.s1_list[0]]],
-                    }])
-                self._pkg([0], "image-update -v", rv=EXIT_NOP)
-
-                # make sure the latest synced packaged is in every image
-                for i in [0, 1, 2]:
-                        self._pkg([i], "list -v %s " % self.p_sync1_name[0])
-
-                # children should be synced
-                self._pkg([1, 2], "audit-linked")
-
-        def test_p2c_recurse_1_install_1(self):
-                self._imgs_create(3)
-
-                # install different synced package into each image
-                for i in [0, 1]:
-                        self._pkg([i], "install -v %s" % self.p_sync1_name[1])
-                for i in [2]:
-                        self._pkg([i], "install -v %s" % self.p_sync1_name[2])
-
-                # attach --linked-md-only doesn't install constraints package
-                self._attach_child(0, [1])
-                self._attach_child(0, [2], args="--linked-md-only")
-
-                self._pkg([0], "install -v -n %s" % self.p_sync1_name[0])
-                self._pkg([0], "install --parsable=0 %s" % self.p_sync1_name[0])
-                self.assertEqualParsable(self.output,
-                    change_packages=[[self.s1_list[1], self.s1_list[0]]],
-                    child_images=[{
-                        "image_name": "system:img1",
-                        "change_packages": [[self.s1_list[1], self.s1_list[0]]],
-                    },
-                    {
-                        "image_name": "system:img2",
-                        "change_packages": [[self.s1_list[2], self.s1_list[0]]],
-                    }])
-                self._pkg([0], "install -v %s" % self.p_sync1_name[0],
-                    rv=EXIT_NOP)
-
-                # make sure the latest synced packaged is in every image
-                for i in [0, 1, 2]:
-                        self._pkg([i], "list -v %s " % self.p_sync1_name[0])
-
-                # children should be synced
-                self._pkg([1, 2], "audit-linked")
-
         def test_verify(self):
                 self._imgs_create(5)
 
@@ -1548,6 +1627,1518 @@
                 self._pkg([0], "update --stage=prepare")
                 self._pkg([0], "update --stage=execute")
 
+        def __test_missing_parent_pkgs_metadata(self,
+            install="", audit_rv=EXIT_OK):
+                """Verify that we can manipulate and update linked child
+                images which are missing their parent package metadata.  Also
+                verify that when we update those children the metadata gets
+                updated correctly."""
+
+                # create parent (0), push child (1), and pull child (2)
+                self._imgs_create(3)
+                self._attach_child(0, [1])
+                self._attach_parent([2], 0)
+
+                # paths for the linked image metadata files
+                md_files = [
+                        "%s/var/pkg/linked/linked_ppkgs" % self.i_path[i]
+                        for i in [1, 2]
+                ]
+
+                if install:
+                        for i in [0, 1, 2]:
+                                self._pkg([i], "install -v %s" % install)
+
+                # delete linked image metadata files
+                for f in md_files:
+                        self.file_exists(f)
+                        self._ccmd("rm %s" % f)
+
+                # verify that audit-linked can handle missing metadata.
+                self._pkg([0], "audit-linked -a")
+                self._pkg([2], "audit-linked")
+                self._pkg([1], "audit-linked", rv=audit_rv)
+                self._pkg([2], "audit-linked --no-parent-sync", rv=audit_rv)
+
+                # since we haven't modified the image, make sure the
+                # parent package metadata files weren't re-created.
+                for f in md_files:
+                        self.file_doesnt_exist(f)
+
+                # verify that sync-linked can handle missing metadata.
+                # also verify that the operation will succeed and is
+                # not a noop (since it needs to update the metadata).
+                self._pkg([0], "sync-linked -a -n")
+                self._pkg([2], "sync-linked -n")
+
+                # since we haven't modified the image, make sure the
+                # parent package metadata files weren't re-created.
+                for f in md_files:
+                        self.file_doesnt_exist(f)
+
+                # do a sync and verify that the files get created
+                self._pkg([0], "sync-linked -a")
+                self._pkg([2], "sync-linked")
+                for f in md_files:
+                        self.file_exists(f)
+
+        def test_missing_parent_pkgs_metadata_1(self):
+                """Verify that we can manipulate and update linked child
+                images which are missing their parent package metadata.  Also
+                verify that when we update those children the metadata gets
+                updated correctly.
+
+                Test when parent has no packages installed.  The children also
+                have no packages installed so they are always in sync."""
+                self.__test_missing_parent_pkgs_metadata()
+
+        def test_missing_parent_pkgs_metadata_2(self):
+                """Verify that we can manipulate and update linked child
+                images which are missing their parent package metadata.  Also
+                verify that when we update those children the metadata gets
+                updated correctly.
+
+                Test when parent and children have sync packages installed.
+                This means the children are diverged if their parent package
+                metadata is missing."""
+                self.__test_missing_parent_pkgs_metadata(
+                    install=self.p_sync1_name[0], audit_rv=EXIT_DIVERGED)
+
+        def __test_missing_parent_publisher_metadata(self,
+            clear_pubs=False):
+                """Verify that we can manipulate and update linked child
+                images which are missing their parent publisher metadata.  Also
+                verify that when we update those children the metadata gets
+                updated correctly."""
+
+                # create parent (0), push child (1), and pull child (2)
+                self._imgs_create(3)
+                self._attach_child(0, [1])
+                self._attach_parent([2], 0)
+
+                # paths for the linked image metadata files
+                md_files = [
+                        "%s/var/pkg/linked/linked_ppubs" % self.i_path[i]
+                        for i in [1, 2]
+                ]
+
+                if clear_pubs:
+                        self._pkg([0, 1, 2], "unset-publisher test")
+
+                # delete linked image metadata files
+                for f in md_files:
+                        self.file_exists(f)
+                        self._ccmd("rm %s" % f)
+
+                # verify that audit-linked can handle missing metadata.
+                self._pkg([0], "audit-linked -a")
+                self._pkg([1, 2], "audit-linked")
+                self._pkg([2], "audit-linked --no-parent-sync")
+
+                # since we haven't modified the image, make sure the
+                # parent publisher metadata files weren't re-created.
+                for f in md_files:
+                        self.file_doesnt_exist(f)
+
+                # verify that sync-linked can handle missing metadata.
+                # also verify that the operation will succeed and is
+                # not a noop (since it needs to update the metadata).
+                self._pkg([0], "sync-linked -a -n")
+                self._pkg([2], "sync-linked -n")
+
+                # since we haven't modified the image, make sure the
+                # parent publisher metadata files weren't re-created.
+                for f in md_files:
+                        self.file_doesnt_exist(f)
+
+                # do a sync and verify that the files get created
+                self._pkg([0], "sync-linked -a")
+                self._pkg([2], "sync-linked")
+                for f in md_files:
+                        self.file_exists(f)
+
+        def test_missing_parent_publisher_metadata_1(self):
+                """Verify that we can manipulate and update linked child
+                images which are missing their parent publisher metadata.  Also
+                verify that when we update those children the metadata gets
+                updated correctly.
+
+                Test when parent has no publishers configured."""
+                self.__test_missing_parent_publisher_metadata(
+                    clear_pubs=True)
+
+        def test_missing_parent_publisher_metadata_2(self):
+                """Verify that we can manipulate and update linked child
+                images which are missing their parent publisher metadata.  Also
+                verify that when we update those children the metadata gets
+                updated correctly.
+
+                Test when parent has publishers configured."""
+                self.__test_missing_parent_publisher_metadata()
+
+
+class TestFacetInheritance(TestPkgLinked):
+        """Class to test facet inheritance between images.
+
+        These tests focus specifically on facet propagation from parent to
+        child images, masked facet handling, and facet reporting.  These tests
+        do not attempt to verify that the packaging system correctly handles
+        operations when facets and packages are changing at the same time."""
+
+        p_files = [
+            "tmp/foo1",
+            "tmp/foo2",
+            "tmp/foo3",
+            "tmp/sync1",
+            "tmp/sync2",
+            "tmp/sync3",
+        ]
+        p_foo_template = """
+            open foo@%(ver)d
+            add file tmp/foo1 mode=0555 owner=root group=bin path=foo1_foo1 facet.foo1=true
+            add file tmp/foo2 mode=0555 owner=root group=bin path=foo1_foo2 facet.foo2=true
+            add file tmp/foo3 mode=0555 owner=root group=bin path=foo1_foo3 facet.foo3=true
+            close"""
+        p_sync1_template = """
+            open sync1@%(ver)d
+            add file tmp/sync1 mode=0555 owner=root group=bin path=sync1_sync1 facet.sync1=true
+            add file tmp/sync2 mode=0555 owner=root group=bin path=sync1_sync2 facet.sync2=true
+            add file tmp/sync3 mode=0555 owner=root group=bin path=sync1_sync3 facet.sync3=true
+            add depend type=parent fmri=feature/package/dependency/self
+            close"""
+        p_sync2_template = """
+            open sync2@%(ver)d
+            add file tmp/sync1 mode=0555 owner=root group=bin path=sync2_sync1 facet.sync1=true
+            add file tmp/sync2 mode=0555 owner=root group=bin path=sync2_sync2 facet.sync2=true
+            add file tmp/sync3 mode=0555 owner=root group=bin path=sync2_sync3 facet.sync3=true
+            add depend type=parent fmri=feature/package/dependency/self
+            close"""
+        p_inc1_template = """
+            open inc1@%(ver)d
+            add depend type=require fmri=sync1
+            add depend type=incorporate fmri=sync1@%(ver)d facet.123456=true
+            add depend type=parent fmri=feature/package/dependency/self
+            close"""
+        p_inc2_template = """
+            open inc2@%(ver)d
+            add depend type=require fmri=sync2
+            add depend type=incorporate fmri=sync2@%(ver)d facet.456789=true
+            add depend type=parent fmri=feature/package/dependency/self
+            close"""
+
+        p_data_template = [
+            p_foo_template,
+            p_sync1_template,
+            p_sync2_template,
+            p_inc1_template,
+            p_inc2_template,
+        ]
+        p_data = []
+        for i in range(2):
+                for j in p_data_template:
+                        p_data.append(j % {"ver": (i + 1)})
+        p_fmri = {}
+
+        def setUp(self):
+                self.i_count = 3
+                pkg5unittest.ManyDepotTestCase.setUp(self, ["test"],
+                    image_count=self.i_count)
+
+                # create files that go in packages
+                self.make_misc_files(self.p_files)
+
+                # get repo url
+                self.rurl1 = self.dcs[1].get_repo_url()
+
+                # populate repository
+                for p in self.p_data:
+                        fmristr = self.pkgsend_bulk(self.rurl1, p)[0]
+                        f = fmri.PkgFmri(fmristr, "5.11")
+                        pkgstr = "%s@%s" % (f.pkg_name, f.version.release)
+                        self.p_fmri[pkgstr] = fmristr
+
+                # setup image names and paths
+                self.i_name = []
+                self.i_path = []
+                self.i_api = []
+                self.i_api_reset = []
+                for i in range(self.i_count):
+                        name = "system:img%d" % i
+                        self.i_name.insert(i, name)
+                        self.i_path.insert(i, self.img_path(i))
+
+        def test_facet_inheritance(self):
+                """Verify basic facet inheritance functionality for both push
+                and pull children."""
+
+                # create parent (0), push child (1), and pull child (2)
+                self._imgs_create(3)
+                self._attach_child(0, [1])
+                self._attach_parent([2], 0)
+
+                # install packages with inheritable facets in all images
+                self._pkg([0, 1, 2], "install -v %s" % self.p_fmri["inc1@2"])
+                self._pkg([0, 1, 2], "install -v %s" % self.p_fmri["inc2@2"])
+
+                # verify that there are no facets set in any images
+                self._pkg([0, 1, 2], "facet -H -F tsv", \
+                    output_cb=self._assertEqual_cb(""))
+
+                # set some random facets and make sure they aren't inherited
+                # or affected by inherited facets
+                output = {}
+                for i in [0, 1, 2]:
+                        i2 = i + 1
+                        self._pkg([i], "change-facet "
+                            "sync%d=False foo%d=True" % (i2, i2))
+                for i in [0, 1, 2]:
+                        i2 = i + 1
+                        output = \
+                            "facet.foo%d\tTrue\tlocal\n" % i2 + \
+                            "facet.sync%d\tFalse\tlocal\n" % i2
+                        self._pkg([i], "facet -H -F tsv", \
+                            output_cb=self._assertEqual_cb(output))
+
+                # disable an inheritable facet and verify it gets inherited
+                self._pkg([0], "change-facet 123456=False")
+                self._pkg([2], "sync-linked")
+                for i in [1, 2]:
+                        i2 = i + 1
+                        output = \
+                            "facet.123456\tFalse\tparent\n" + \
+                            "facet.foo%d\tTrue\tlocal\n" % i2 + \
+                            "facet.sync%d\tFalse\tlocal\n" % i2
+                        self._pkg([i], "facet -H -F tsv", \
+                            output_cb=self._assertEqual_cb(output))
+
+                # enable an inheritable facet and verify it doesn't get
+                # inherited
+                self._pkg([0], "change-facet 123456=True")
+                self._pkg([2], "sync-linked")
+                for i in [1, 2]:
+                        i2 = i + 1
+                        output = \
+                            "facet.foo%d\tTrue\tlocal\n" % i2 + \
+                            "facet.sync%d\tFalse\tlocal\n" % i2
+                        self._pkg([i], "facet -H -F tsv", \
+                            output_cb=self._assertEqual_cb(output))
+
+                # clear an inheritable facet and verify it doesn't get
+                # inherited
+                self._pkg([0], "change-facet 123456=False")
+                self._pkg([2], "sync-linked")
+                self._pkg([0], "change-facet 123456=None")
+                self._pkg([2], "sync-linked")
+                for i in [1, 2]:
+                        i2 = i + 1
+                        output = \
+                            "facet.foo%d\tTrue\tlocal\n" % i2 + \
+                            "facet.sync%d\tFalse\tlocal\n" % i2
+                        self._pkg([i], "facet -H -F tsv", \
+                            output_cb=self._assertEqual_cb(output))
+
+        def test_facet_inheritance_globs(self):
+                """Verify that all facet glob patterns which affect
+                inheritable facets get propagated to children."""
+
+                # create parent (0), push child (1)
+                self._imgs_create(2)
+                self._attach_child(0, [1])
+
+                self._pkg([0], "change-facet" +
+                    " 123456=False" +
+                    " 456789=True" +
+                    " *456*=False" +
+                    " *789=True" +
+                    " 123*=True")
+
+                # verify that no facets are inherited
+                output = ""
+                self._pkg([1], "facet -H -F tsv", \
+                    output_cb=self._assertEqual_cb(output))
+
+                # install packages with inheritable facets in the parent
+                self._pkg([0], "install -v %s" % self.p_fmri["inc1@2"])
+
+                # verify that three facets are inherited
+                output = ""
+                output += "facet.*456*\tFalse\tparent\n"
+                output += "facet.123*\tTrue\tparent\n"
+                output += "facet.123456\tFalse\tparent\n"
+                self._pkg([1], "facet -H -F tsv", \
+                    output_cb=self._assertEqual_cb(output))
+
+                # install packages with inheritable facets in the parent
+                self._pkg([0], "install -v %s" % self.p_fmri["inc2@2"])
+
+                # verify that five facets are inherited
+                output = ""
+                output += "facet.*456*\tFalse\tparent\n"
+                output += "facet.*789\tTrue\tparent\n"
+                output += "facet.123*\tTrue\tparent\n"
+                output += "facet.123456\tFalse\tparent\n"
+                output += "facet.456789\tTrue\tparent\n"
+                self._pkg([1], "facet -H -F tsv", \
+                    output_cb=self._assertEqual_cb(output))
+
+                # remove packages with inheritable facets in the parent
+                self._pkg([0], "uninstall -v %s" % self.p_fmri["inc1@2"])
+
+                # verify that three facets are inherited
+                output = ""
+                output += "facet.*456*\tFalse\tparent\n"
+                output += "facet.*789\tTrue\tparent\n"
+                output += "facet.456789\tTrue\tparent\n"
+                self._pkg([1], "facet -H -F tsv", \
+                    output_cb=self._assertEqual_cb(output))
+
+                # remove packages with inheritable facets in the parent
+                self._pkg([0], "uninstall -v %s" % self.p_fmri["inc2@2"])
+
+                # verify that no facets are inherited
+                output = ""
+                self._pkg([1], "facet -H -F tsv", \
+                    output_cb=self._assertEqual_cb(output))
+
+        def test_facet_inheritance_masked_system(self):
+                """Test reporting of system facets."""
+
+                # create image (0)
+                self._imgs_create(1)
+
+                # install a package with facets in the image
+                self._pkg([0], "install -v %s" % self.p_fmri["foo@2"])
+
+                # set a facet
+                self._pkg([0], "change-facet 'f*1'=False")
+
+                # verify masked output
+                output_am  = \
+                    "facet.f*1\tFalse\tlocal\tFalse\n" + \
+                    "facet.foo1\tFalse\tlocal\tFalse\n" + \
+                    "facet.foo2\tTrue\tsystem\tFalse\n" + \
+                    "facet.foo3\tTrue\tsystem\tFalse\n"
+                output_im  = \
+                    "facet.foo1\tFalse\tlocal\tFalse\n" + \
+                    "facet.foo2\tTrue\tsystem\tFalse\n" + \
+                    "facet.foo3\tTrue\tsystem\tFalse\n"
+                self._pkg([0], "facet -H -F tsv -m -a", \
+                    output_cb=self._assertEqual_cb(output_am))
+                self._pkg([0], "facet -H -F tsv -m -i", \
+                    output_cb=self._assertEqual_cb(output_im))
+
+        def test_facet_inheritance_masked_preserve(self):
+                """Test handling for masked facets
+
+                Verify that pre-existing local facet settings which get masked
+                by inherited facets get restored when the inherited facets go
+                away."""
+
+                # create parent (0), push child (1), and pull child (2)
+                self._imgs_create(3)
+                self._attach_child(0, [1])
+                self._attach_parent([2], 0)
+
+                # install a package with inheritable facets in the parent
+                self._pkg([0], "install -v %s" % self.p_fmri["inc1@2"])
+
+                for fv in ["True", "False"]:
+
+                        # set inheritable facet locally in children
+                        self._pkg([1, 2], "change-facet 123456=%s" % fv)
+
+                        # disable inheritable facet in parent
+                        self._pkg([0], "change-facet 123456=False")
+                        self._pkg([2], "sync-linked")
+
+                        # verify inheritable facet is disabled in children
+                        output = "facet.123456\tFalse\tparent\n"
+                        output_m = \
+                            "facet.123456\tFalse\tparent\tFalse\n" + \
+                            "facet.123456\t%s\tlocal\tTrue\n" % fv
+                        for i in [1, 2]:
+                                self._pkg([i], "facet -H -F tsv", \
+                                    output_cb=self._assertEqual_cb(output))
+                                self._pkg([i], "facet -H -F tsv -m", \
+                                    output_cb=self._assertEqual_cb(output_m))
+
+                        # clear inheritable facet in the parent
+                        self._pkg([0], "change-facet 123456=None")
+                        self._pkg([2], "sync-linked")
+
+                        # verify the local child setting is restored
+                        output = "facet.123456\t%s\tlocal\n" % fv
+                        output_m = "facet.123456\t%s\tlocal\tFalse\n" % fv
+                        for i in [1, 2]:
+                                self._pkg([i], "facet -H -F tsv", \
+                                    output_cb=self._assertEqual_cb(output))
+                                self._pkg([i], "facet -H -F tsv -m", \
+                                    output_cb=self._assertEqual_cb(output_m))
+
+        def test_facet_inheritance_masked_update(self):
+                """Test handling for masked facets.
+
+                Verify that local facet changes can be made while inherited
+                facets masking the local settings exist."""
+
+                # create parent (0), push child (1), and pull child (2)
+                self._imgs_create(3)
+                self._attach_child(0, [1])
+                self._attach_parent([2], 0)
+
+                # install a package with inheritable facets in the parent
+                self._pkg([0], "install -v %s" % self.p_fmri["inc1@2"])
+
+                # disable inheritable facet in parent
+                self._pkg([0], "change-facet 123456=False")
+                self._pkg([2], "sync-linked")
+
+                # clear inheritable facet in children
+                # the facet is not set in the child so this is a noop
+                self._pkg([1, 2], "change-facet 123456=None", rv=EXIT_NOP)
+
+                # verify inheritable facet is disabled in children
+                output = "facet.123456\tFalse\tparent\n"
+                output_m = "facet.123456\tFalse\tparent\tFalse\n"
+                for i in [1, 2]:
+                        self._pkg([i], "facet -H -F tsv", \
+                            output_cb=self._assertEqual_cb(output))
+                        self._pkg([i], "facet -H -F tsv -m", \
+                            output_cb=self._assertEqual_cb(output_m))
+
+                for fv in ["True", "False"]:
+
+                        # set inheritable facet locally in children
+                        self._pkg([1, 2], "change-facet 123456=%s" % fv)
+
+                        # verify inheritable facet is disabled in children
+                        output = "facet.123456\tFalse\tparent\n"
+                        output_m = \
+                            "facet.123456\tFalse\tparent\tFalse\n" + \
+                            "facet.123456\t%s\tlocal\tTrue\n" % fv
+                        for i in [1, 2]:
+                                self._pkg([i], "facet -H -F tsv", \
+                                    output_cb=self._assertEqual_cb(output))
+                                self._pkg([i], "facet -H -F tsv -m", \
+                                    output_cb=self._assertEqual_cb(output_m))
+
+                        # re-set inheritable facet locally in children
+                        # this is a noop
+                        self._pkg([1, 2], "change-facet 123456=%s" % fv,
+                            rv=EXIT_NOP)
+
+                        # clear inheritable facet in the parent
+                        self._pkg([0], "change-facet 123456=None")
+                        self._pkg([2], "sync-linked")
+
+                        # verify the local child setting is restored
+                        output = "facet.123456\t%s\tlocal\n" % fv
+                        output_m = "facet.123456\t%s\tlocal\tFalse\n" % fv
+                        for i in [1, 2]:
+                                self._pkg([i], "facet -H -F tsv", \
+                                    output_cb=self._assertEqual_cb(output))
+                                self._pkg([i], "facet -H -F tsv -m", \
+                                    output_cb=self._assertEqual_cb(output_m))
+
+                        # disable inheritable facet in parent
+                        self._pkg([0], "change-facet 123456=False")
+                        self._pkg([2], "sync-linked")
+
+                # clear inheritable facet locally in children
+                self._pkg([1, 2], "change-facet 123456=None")
+
+                # verify inheritable facet is disabled in children
+                output = "facet.123456\tFalse\tparent\n"
+                output_m = "facet.123456\tFalse\tparent\tFalse\n"
+                for i in [1, 2]:
+                        self._pkg([i], "facet -H -F tsv", \
+                            output_cb=self._assertEqual_cb(output))
+                        self._pkg([i], "facet -H -F tsv -m", \
+                            output_cb=self._assertEqual_cb(output_m))
+
+                # re-clear inheritable facet locally in children
+                # this is a noop
+                self._pkg([1, 2], "change-facet 123456=None", rv=EXIT_NOP)
+
+                # clear inheritable facet in the parent
+                self._pkg([0], "change-facet 123456=None")
+                self._pkg([2], "sync-linked")
+
+                # verify the local child setting is restored
+                for i in [1, 2]:
+                        self._pkg([i], "facet -H -F tsv", \
+                            output_cb=self._assertEqual_cb(""))
+                        self._pkg([i], "facet -H -F tsv -m", \
+                            output_cb=self._assertEqual_cb(""))
+
+        def __test_facet_inheritance_via_op(self, op):
+                """Verify that if we do a an "op" operation, the latest facet
+                data gets pushed/pulled to child images."""
+
+                # create parent (0), push child (1), and pull child (2)
+                self._imgs_create(3)
+                self._attach_child(0, [1])
+                self._attach_parent([2], 0)
+
+                # install synced incorporations
+                self._pkg([0, 1, 2], "install -v %s %s" %
+                    (self.p_fmri["inc1@1"], self.p_fmri["foo@1"]))
+
+                # disable a random facet in all images
+                self._pkg([0, 1, 2], "change-facet -I foo=False")
+
+                # disable an inheritable facet in the parent while ignoring
+                # children.
+                self._pkg([0], "change-facet -I 123456=False")
+
+                # verify that the change hasn't been propagated to the child
+                output = "facet.foo\tFalse\tlocal\n"
+                self._pkg([1, 2], "facet -H -F tsv",
+                    output_cb=self._assertEqual_cb(output))
+
+                # do "op" in the parent and verify the latest facet data was
+                # pushed to the child
+                self._pkg([0], op)
+                output  = "facet.123456\tFalse\tparent\n"
+                output += "facet.foo\tFalse\tlocal\n"
+                self._pkg([1], "facet -H -F tsv",
+                    output_cb=self._assertEqual_cb(output))
+
+                # do "op" in the child and verify the latest facet data was
+                # pulled from the parent.
+                self._pkg([2], op)
+                output  = "facet.123456\tFalse\tparent\n"
+                output += "facet.foo\tFalse\tlocal\n"
+                self._pkg([2], "facet -H -F tsv",
+                    output_cb=self._assertEqual_cb(output))
+
+        def test_facet_inheritance_via_noop_update(self):
+                """Verify that if we do a noop update operation, the
+                latest facet data still gets pushed/pulled to child images."""
+
+                self.__test_facet_inheritance_via_op(
+                    "update")
+
+        def test_facet_inheritance_via_noop_install(self):
+                """Verify that if we do a noop install operation, the
+                latest facet data still gets pushed/pulled to child images."""
+
+                self.__test_facet_inheritance_via_op(
+                    "install -v %s" % self.p_fmri["inc1@1"])
+
+        def test_facet_inheritance_via_noop_change_facet(self):
+                """Verify that if we do a noop change-facet operation on a
+                parent image, the latest facet data still gets pushed out to
+                child images."""
+
+                self.__test_facet_inheritance_via_op(
+                    "change-facet foo=False")
+
+        def test_facet_inheritance_via_uninstall(self):
+                """Verify that if we do an uninstall operation on a
+                parent image, the latest facet data still gets pushed out to
+                child images."""
+
+                self.__test_facet_inheritance_via_op(
+                    "uninstall -v %s" % self.p_fmri["foo@1"])
+
+        def test_facet_inheritance_cleanup_via_detach(self):
+                """Verify that if we detach a child linked image, that any
+                inherited facets go away."""
+
+                # create parent (0), push child (1), and pull child (2)
+                self._imgs_create(3)
+                self._attach_child(0, [1])
+                self._attach_parent([2], 0)
+
+                # install synced incorporations
+                self._pkg([0, 1, 2], "install -v %s %s" %
+                    (self.p_fmri["inc1@1"], self.p_fmri["foo@1"]))
+
+                # disable a random facet in all images
+                self._pkg([0, 1, 2], "change-facet -I foo=False")
+
+                # disable an inheritable facet in the parent and make sure the
+                # change propagates to all children
+                self._pkg([0], "change-facet 123456=False")
+                self._pkg([2], "sync-linked")
+                output  = "facet.123456\tFalse\tparent\n"
+                output += "facet.foo\tFalse\tlocal\n"
+                self._pkg([1, 2], "facet -H -F tsv",
+                    output_cb=self._assertEqual_cb(output))
+
+                # simulate detaching children via metadata only
+                # verify the inherited facets don't get removed
+                self._pkg([0], "detach-linked --linked-md-only -n -l %s" %
+                    self.i_name[1])
+                self._pkg([2], "detach-linked --linked-md-only -n")
+                self._pkg([1, 2], "facet -H -F tsv",
+                    output_cb=self._assertEqual_cb(output))
+
+                # simulate detaching children
+                # verify the inherited facets don't get removed
+                self._pkg([0], "detach-linked -n -l %s" % self.i_name[1])
+                self._pkg([2], "detach-linked -n")
+                self._pkg([1, 2], "facet -H -F tsv",
+                    output_cb=self._assertEqual_cb(output))
+
+                # detach children via metadata only
+                # verify the inherited facets don't get removed
+                # (they can't get removed until we modify the image)
+                self._pkg([0], "detach-linked --linked-md-only -l %s" %
+                    self.i_name[1])
+                self._pkg([2], "detach-linked --linked-md-only")
+                self._pkg([1, 2], "facet -H -F tsv",
+                    output_cb=self._assertEqual_cb(output))
+
+                # re-attach children and sanity check facets
+                self._attach_child(0, [1])
+                self._attach_parent([2], 0)
+                self._pkg([1, 2], "facet -H -F tsv",
+                    output_cb=self._assertEqual_cb(output))
+
+                # try to detach children with --no-pkg-updates
+                # verify this fails
+                # (removal of inherited facets is the equivalent of a
+                # change-facet operation, which requires updating all
+                # packages, but since we've specified no pkg updates this must
+                # fail.)
+                self._pkg([0], "detach-linked --no-pkg-updates -l %s" %
+                    self.i_name[1], rv=EXIT_OOPS)
+                self._pkg([2], "detach-linked --no-pkg-updates", rv=EXIT_OOPS)
+                self._pkg([1, 2], "facet -H -F tsv",
+                    output_cb=self._assertEqual_cb(output))
+
+                # detach children
+                # verify the inherited facets get removed
+                self._pkg([0], "detach-linked -l %s" % self.i_name[1])
+                self._pkg([2], "detach-linked")
+                output = "facet.foo\tFalse\tlocal\n"
+                self._pkg([1, 2], "facet -H -F tsv",
+                    output_cb=self._assertEqual_cb(output))
+
+        def __test_missing_facet_inheritance_metadata(self, pfacets="",
+            cfacet_output=""):
+                """Verify that we can manipulate and update linked child
+                images which are missing their parent facet metadata.  Also
+                verify that when we update those children the metadata gets
+                updated correctly."""
+
+                # create parent (0), push child (1), and pull child (2)
+                self._imgs_create(3)
+                self._attach_child(0, [1])
+                self._attach_parent([2], 0)
+
+                # paths for the linked image metadata files
+                md_files = [
+                        "%s/var/pkg/linked/linked_pfacets" % self.i_path[i]
+                        for i in [1, 2]
+                ]
+
+                # install foo into each image
+                self._pkg([0], "install -v %s" % self.p_fmri["foo@1"])
+
+                # install synced incorporation and package
+                self._pkg([0], "install -v %s" % self.p_fmri["inc1@1"])
+                self._pkg([2], "sync-linked")
+
+                if pfacets:
+                        self._pkg([0], "change-facet %s" % pfacets)
+                        self._pkg([2], "sync-linked")
+
+                # verify the child facet settings
+                self._pkg([1, 2], "facet -H -F tsv", \
+                    output_cb=self._assertEqual_cb(cfacet_output))
+
+                # verify that the child images are in sync.
+                # verify that a sync-linked is a noop
+                self._pkg([0], "audit-linked -a")
+                self._pkg([1, 2], "audit-linked")
+                self._pkg([0], "sync-linked -a -n", rv=EXIT_NOP)
+                self._pkg([2], "sync-linked -n", rv=EXIT_NOP)
+
+                # delete linked image metadata files
+                for f in md_files:
+                        self.file_exists(f)
+                        self._ccmd("rm %s" % f)
+
+                # verify the child facet settings
+                self._pkg([1, 2], "facet -H -F tsv", \
+                    output_cb=self._assertEqual_cb(cfacet_output))
+
+                # verify that audit-linked can handle missing metadata.
+                self._pkg([0], "audit-linked -a")
+                self._pkg([1, 2], "audit-linked")
+                self._pkg([2], "audit-linked --no-parent-sync")
+
+                # verify that sync-linked can handle missing metadata.
+                # also verify that the operation will succeed and is
+                # not a noop (since it needs to update the metadata).
+                self._pkg([0], "sync-linked -a -n")
+                self._pkg([2], "sync-linked -n")
+
+                # since we haven't modified the image, make sure the
+                # facet metadata files weren't re-created.
+                for f in md_files:
+                        self.file_doesnt_exist(f)
+
+                # do a sync and verify that the files get created
+                self._pkg([0], "sync-linked -a")
+                self._pkg([2], "sync-linked")
+                for f in md_files:
+                        self.file_exists(f)
+
+        def test_missing_facet_inheritance_metadata_1(self):
+                """Verify that we can manipulate and update linked child
+                images which are missing their parent facet metadata.  Also
+                verify that when we update those children the metadata gets
+                updated correctly.
+
+                Test when there are no inherited facets present."""
+                self.__test_missing_facet_inheritance_metadata()
+
+        def test_missing_facet_inheritance_metadata_2(self):
+                """Verify that we can manipulate and update linked child
+                images which are missing their parent facet metadata.  Also
+                verify that when we update those children the metadata gets
+                updated correctly.
+
+                Test with inherited facets present"""
+                self.__test_missing_facet_inheritance_metadata(
+                    pfacets="123456=False",
+                    cfacet_output="facet.123456\tFalse\tparent\n")
+
+
+class TestConcurrentFacetChange(TestPkgLinked):
+        """Class to test that packaging operations work correctly when facets
+        are changing concurrently.
+
+        These tests do not focus on verifying that facets are propagated
+        correctly from parent to child images."""
+
+        p_misc = """
+            open misc@1,5.11-0
+            close"""
+        p_common = """
+            open common@1,5.11-0
+            close"""
+        p_AA_sync_template = """
+            open AA-sync@%(ver)d,5.11-0
+            add set name=variant.foo value=bar value=baz
+            add depend type=require fmri=common
+            add depend type=require fmri=A-incorp-sync
+            add depend type=parent fmri=feature/package/dependency/self \
+                variant.foo=bar
+            close"""
+        p_AB_sync_template = """
+            open AB-sync@%(ver)d,5.11-0
+            add set name=variant.foo value=bar value=baz
+            add depend type=require fmri=common
+            add depend type=require fmri=A-incorp-sync
+            add depend type=parent fmri=feature/package/dependency/self \
+                variant.foo=bar
+            close"""
+        p_BA_template = """
+            open BA@%(ver)d,5.11-0
+            add depend type=require fmri=common
+            add depend type=require fmri=B-incorp-sync
+            close"""
+        p_CA_template = """
+            open CA@%(ver)d,5.11-0
+            add depend type=require fmri=common
+            add depend type=require fmri=C-incorp
+            close"""
+        p_A_incorp_sync_template = """
+            open A-incorp-sync@%(ver)d,5.11-0
+            add set name=variant.foo value=bar value=baz
+            add depend type=incorporate fmri=AA-sync@%(ver)d facet.AA-sync=true
+            add depend type=incorporate fmri=AB-sync@%(ver)d facet.AA-sync=true
+            add depend type=parent fmri=feature/package/dependency/self \
+                variant.foo=bar
+            close"""
+        p_B_incorp_sync_template = """
+            open B-incorp-sync@%(ver)d,5.11-0
+            add set name=variant.foo value=bar value=baz
+            add depend type=incorporate fmri=BA@%(ver)d facet.BA=true
+            add depend type=parent fmri=feature/package/dependency/self \
+                variant.foo=bar
+            close"""
+        p_C_incorp_template = """
+            open C-incorp@%(ver)d,5.11-0
+            add depend type=incorporate fmri=CA@%(ver)d facet.CA=true
+            close"""
+        p_entire_sync_template = """
+            open entire-sync@%(ver)d,5.11-0
+            add set name=variant.foo value=bar value=baz
+            add depend type=require fmri=A-incorp-sync
+            add depend type=incorporate fmri=A-incorp-sync@%(ver)d \
+                facet.A-incorp-sync=true
+            add depend type=require fmri=B-incorp-sync
+            add depend type=incorporate fmri=B-incorp-sync@%(ver)d \
+                facet.B-incorp-sync=true
+            add depend type=require fmri=C-incorp
+            add depend type=incorporate fmri=C-incorp@%(ver)d \
+                facet.C-incorp=true
+            add depend type=parent fmri=feature/package/dependency/self \
+                variant.foo=bar
+            close"""
+
+        p_data_template = [
+            p_AA_sync_template,
+            p_AB_sync_template,
+            p_BA_template,
+            p_CA_template,
+            p_A_incorp_sync_template,
+            p_B_incorp_sync_template,
+            p_C_incorp_template,
+            p_entire_sync_template,
+        ]
+
+        p_data = [p_misc, p_common]
+        for i in range(4):
+                for j in p_data_template:
+                        p_data.append(j % {"ver": (i + 1)})
+        p_fmri = {}
+
+        def setUp(self):
+                self.i_count = 2
+                pkg5unittest.ManyDepotTestCase.setUp(self, ["test"],
+                    image_count=self.i_count)
+
+                # get repo url
+                self.rurl1 = self.dcs[1].get_repo_url()
+
+                # populate repository
+                for p in self.p_data:
+                        fmristr = self.pkgsend_bulk(self.rurl1, p)[0]
+                        f = fmri.PkgFmri(fmristr, "5.11")
+                        pkgstr = "%s@%s" % (f.pkg_name, f.version.release)
+                        self.p_fmri[pkgstr] = fmristr
+
+                # setup image names and paths
+                self.i_name = []
+                self.i_path = []
+                self.i_api = []
+                self.i_api_reset = []
+                for i in range(self.i_count):
+                        name = "system:img%d" % i
+                        self.i_name.insert(i, name)
+                        self.i_path.insert(i, self.img_path(i))
+
+        def __test_concurrent_facet_change_via_child_op(self,
+            op, op_args, extra_child_pkgs=None, child_variants=None,
+            child_pre_op_audit=True, **kwargs):
+                """Verify that if we do an operation "op" on a child image, it
+                automatically brings its packages in sync with its parent."""
+
+                # create parent (0) and pull child (1)
+                self._imgs_create(2)
+
+                # setup the parent image
+                parent_facets = [
+                    "facet.AA-sync=False",
+                    "facet.A-incorp-sync=False",
+                    "facet.BA=False",
+                ]
+                parent_pkgs = [
+                    "A-incorp-sync@3",
+                    "AA-sync@4",
+                    "B-incorp-sync@2",
+                    "BA@3",
+                    "C-incorp@2",
+                    "CA@2",
+                    "entire-sync@2",
+                ]
+                self._pkg([0], "change-facet -v %s" % " ".join(parent_facets))
+                self._pkg([0], "install -v %s" % " ".join(parent_pkgs))
+
+                # setup the child image
+                child_facets = [
+                    "facet.C*=False",
+                ]
+                child_pkgs = [
+                    "A-incorp-sync@1",
+                    "AA-sync@1",
+                    "B-incorp-sync@1",
+                    "BA@1",
+                    "C-incorp@1",
+                    "CA@1",
+                    "entire-sync@1",
+                ]
+                self._pkg([1], "change-facet -v %s" % " ".join(child_facets))
+                if child_variants is not None:
+                        self._pkg([1], "change-variant -v %s" %
+                            " ".join(child_variants))
+                self._pkg([1], "install -v %s" % " ".join(child_pkgs))
+                if extra_child_pkgs:
+                        self._pkg([1], "install -v %s" %
+                            " ".join(extra_child_pkgs))
+
+                # attach the child but don't sync it
+                self._attach_parent([1], 0, args="--linked-md-only")
+
+                # verify the child image is still diverged
+                if child_pre_op_audit:
+                        self._pkg([1], "audit-linked", rv=EXIT_DIVERGED)
+
+                # try and then execute op
+                def output_cb(output):
+                        self.assertEqualParsable(output, **kwargs)
+                self._pkg([1], "%s -nv %s" % (op, op_args))
+                self._pkg([1], "%s --parsable=0 %s" % (op, op_args),
+                    output_cb=output_cb)
+
+                # verify sync via audit and sync (which should be a noop)
+                self._pkg([1], "audit-linked")
+                self._pkg([1], "sync-linked -v", rv=EXIT_NOP)
+
+        def __pkg_names_to_fmris(self, remove_packages):
+                """Convert a list of pkg names to fmris"""
+                rv = []
+                for s in remove_packages:
+                        rv.append(self.p_fmri[s])
+                return rv
+
+        def __pkg_name_tuples_to_fmris(self, change_packages):
+                """Convert a list of pkg name tuples to fmris"""
+                rv = []
+                for s, d in change_packages:
+                        rv.append([self.p_fmri[s], self.p_fmri[d]])
+                return rv
+
+        def test_concurrent_facet_change_via_update(self):
+                """Verify that we can update and sync a child
+                image while inherited facets are changing."""
+
+                change_facets = [
+                    ['facet.A-incorp-sync',
+                        False, None, 'parent', False, False],
+                    ['facet.AA-sync', False, None, 'parent', False, False],
+                ]
+                remove_packages = self.__pkg_names_to_fmris([
+                    "AB-sync@1",
+                ])
+                change_packages = self.__pkg_name_tuples_to_fmris([
+                    ["A-incorp-sync@1", "A-incorp-sync@3"],
+                    ["AA-sync@1",       "AA-sync@4"],
+                    ["B-incorp-sync@1", "B-incorp-sync@2"],
+                    ["BA@1",            "BA@2"],
+                    ["C-incorp@1",      "C-incorp@4"],
+                    ["CA@1",            "CA@4"],
+                    ["entire-sync@1",   "entire-sync@2"],
+                ])
+                self.__test_concurrent_facet_change_via_child_op(
+                    "update", "--reject AB-sync",
+                    extra_child_pkgs=["AB-sync@1"],
+                    change_facets=change_facets,
+                    remove_packages=remove_packages,
+                    change_packages=change_packages)
+
+        def test_concurrent_facet_change_via_update_pkg(self):
+                """Verify that we can update a package and sync a child
+                image while inherited facets are changing."""
+
+                change_facets = [
+                    ['facet.A-incorp-sync',
+                        False, None, 'parent', False, False],
+                    ['facet.AA-sync', False, None, 'parent', False, False],
+                ]
+                remove_packages = self.__pkg_names_to_fmris([
+                    "AB-sync@1",
+                ])
+                change_packages = self.__pkg_name_tuples_to_fmris([
+                    ["A-incorp-sync@1", "A-incorp-sync@3"],
+                    ["AA-sync@1",       "AA-sync@4"],
+                    ["B-incorp-sync@1", "B-incorp-sync@2"],
+                    ["BA@1",            "BA@2"],
+                    ["entire-sync@1",   "entire-sync@2"],
+                ])
+
+                # verify update pkg
+                self.__test_concurrent_facet_change_via_child_op(
+                    "update", "--reject AB-sync common",
+                    extra_child_pkgs=["AB-sync@1"],
+                    change_facets=change_facets,
+                    remove_packages=remove_packages,
+                    change_packages=change_packages)
+
+        def test_concurrent_facet_change_via_install(self):
+                """Verify that we can install a package and sync a child
+                image while inherited facets are changing."""
+
+                change_facets = [
+                    ['facet.A-incorp-sync',
+                        False, None, 'parent', False, False],
+                    ['facet.AA-sync', False, None, 'parent', False, False],
+                ]
+                remove_packages = self.__pkg_names_to_fmris([
+                    "AB-sync@1",
+                ])
+                add_packages = self.__pkg_names_to_fmris([
+                    "misc@1",
+                ])
+                change_packages = self.__pkg_name_tuples_to_fmris([
+                    ["A-incorp-sync@1", "A-incorp-sync@3"],
+                    ["AA-sync@1",       "AA-sync@4"],
+                    ["B-incorp-sync@1", "B-incorp-sync@2"],
+                    ["BA@1",            "BA@2"],
+                    ["entire-sync@1",   "entire-sync@2"],
+                ])
+                self.__test_concurrent_facet_change_via_child_op(
+                    "install", "--reject AB-sync misc",
+                    extra_child_pkgs=["AB-sync@1"],
+                    change_facets=change_facets,
+                    remove_packages=remove_packages,
+                    add_packages=add_packages,
+                    change_packages=change_packages)
+
+        def test_concurrent_facet_change_via_sync(self):
+                """Verify that we can sync a child
+                image while inherited facets are changing."""
+
+                change_facets = [
+                    ['facet.A-incorp-sync',
+                        False, None, 'parent', False, False],
+                    ['facet.AA-sync', False, None, 'parent', False, False],
+                ]
+                remove_packages = self.__pkg_names_to_fmris([
+                    "AB-sync@1",
+                ])
+                change_packages = self.__pkg_name_tuples_to_fmris([
+                    ["A-incorp-sync@1", "A-incorp-sync@3"],
+                    ["AA-sync@1",       "AA-sync@4"],
+                    ["B-incorp-sync@1", "B-incorp-sync@2"],
+                    ["BA@1",            "BA@2"],
+                    ["entire-sync@1",   "entire-sync@2"],
+                ])
+                self.__test_concurrent_facet_change_via_child_op(
+                    "sync-linked", "--reject AB-sync",
+                    extra_child_pkgs=["AB-sync@1"],
+                    change_facets=change_facets,
+                    remove_packages=remove_packages,
+                    change_packages=change_packages)
+
+        def test_concurrent_facet_change_via_uninstall(self):
+                """Verify that we can uninstall a package and sync a child
+                image while inherited facets are changing."""
+
+                change_facets = [
+                    ['facet.A-incorp-sync',
+                        False, None, 'parent', False, False],
+                    ['facet.AA-sync', False, None, 'parent', False, False],
+                ]
+                remove_packages = self.__pkg_names_to_fmris([
+                    "AB-sync@1",
+                ])
+                change_packages = self.__pkg_name_tuples_to_fmris([
+                    ["A-incorp-sync@1", "A-incorp-sync@3"],
+                    ["AA-sync@1",       "AA-sync@4"],
+                    ["B-incorp-sync@1", "B-incorp-sync@2"],
+                    ["BA@1",            "BA@2"],
+                    ["entire-sync@1",   "entire-sync@2"],
+                ])
+                self.__test_concurrent_facet_change_via_child_op(
+                    "uninstall", "AB-sync",
+                    extra_child_pkgs=["AB-sync@1"],
+                    change_facets=change_facets,
+                    remove_packages=remove_packages,
+                    change_packages=change_packages)
+
+        def test_concurrent_facet_change_via_change_variant(self):
+                """Verify that we can change variants and sync a child
+                image while inherited facets are changing."""
+
+                change_facets = [
+                    ["facet.A-incorp-sync",
+                        False, None, "parent", False, False],
+                    ["facet.AA-sync", False, None, "parent", False, False],
+                ]
+                change_variants = [
+                    ["variant.foo", "bar"]
+                ]
+                change_packages = self.__pkg_name_tuples_to_fmris([
+                    ["A-incorp-sync@1", "A-incorp-sync@3"],
+                    ["AA-sync@1",       "AA-sync@4"],
+                    ["B-incorp-sync@1", "B-incorp-sync@2"],
+                    ["BA@1",            "BA@2"],
+                    ["entire-sync@1",   "entire-sync@2"],
+                ])
+                self.__test_concurrent_facet_change_via_child_op(
+                    "change-variant", "variant.foo=bar",
+                    child_variants=["variant.foo=baz"],
+                    child_pre_op_audit=False,
+                    change_facets=change_facets,
+                    change_variants=change_variants,
+                    change_packages=change_packages)
+
+        def test_concurrent_facet_change_via_change_facets(self):
+                """Verify that we can change facets and sync a child
+                image while inherited facets are changing."""
+
+                change_facets = [
+                    ["facet.A-incorp-sync",
+                        False, None, "parent", False, False],
+                    ["facet.AA-sync", False, None, "parent", False, False],
+                    ["facet.C-incorp", True, None, "local", False, False],
+                ]
+                change_packages = self.__pkg_name_tuples_to_fmris([
+                    ["A-incorp-sync@1", "A-incorp-sync@3"],
+                    ["AA-sync@1",       "AA-sync@4"],
+                    ["B-incorp-sync@1", "B-incorp-sync@2"],
+                    ["BA@1",            "BA@2"],
+                    ["C-incorp@1",      "C-incorp@2"],
+                    ["entire-sync@1",   "entire-sync@2"],
+                ])
+                self.__test_concurrent_facet_change_via_child_op(
+                    "change-facet", "facet.C-incorp=True",
+                    change_facets=change_facets,
+                    change_packages=change_packages)
+
+        def test_concurrent_facet_change_via_detach(self):
+                """Verify that we can detach a child image which has inherited
+                facets that when removed require updating the image."""
+
+                # create parent (0) and pull child (1)
+                self._imgs_create(2)
+
+                # setup the parent image
+                parent_facets = [
+                    "facet.AA-sync=False",
+                    "facet.A-incorp-sync=False",
+                ]
+                parent_pkgs = [
+                    "A-incorp-sync@2",
+                    "AA-sync@1",
+                    "B-incorp-sync@3",
+                    "BA@3",
+                    "C-incorp@3",
+                    "CA@3",
+                    "entire-sync@3",
+                ]
+                self._pkg([0], "change-facet -v %s" % " ".join(parent_facets))
+                self._pkg([0], "install -v %s" % " ".join(parent_pkgs))
+
+                # attach the child.
+                self._attach_parent([1], 0)
+
+                # setup the child image
+                child_facets = [
+                    "facet.C*=False",
+                ]
+                child_pkgs = [
+                    "A-incorp-sync@2",
+                    "AA-sync@1",
+                    "B-incorp-sync@3",
+                    "BA@3",
+                    "C-incorp@2",
+                    "CA@2",
+                    "entire-sync@3",
+                ]
+                self._pkg([1], "change-facet -v %s" % " ".join(child_facets))
+                self._pkg([1], "install -v %s" % " ".join(child_pkgs))
+
+                # a request to detach the child without any package updates
+                # should fail.
+                self._pkg([1], "detach-linked -v --no-pkg-updates",
+                    rv=EXIT_OOPS)
+
+                # detach the child
+                self._pkg([1], "detach-linked -v")
+
+                # verify the contents of the child image
+                child_fmris = self.__pkg_names_to_fmris([
+                    "A-incorp-sync@3",
+                    "AA-sync@3",
+                    "B-incorp-sync@3",
+                    "BA@3",
+                    "C-incorp@2",
+                    "CA@2",
+                    "entire-sync@3",
+                ])
+                self._pkg([1], "list -v %s" % " ".join(child_fmris))
+                output  = "facet.C*\tFalse\tlocal\n"
+                self._pkg([1], "facet -H -F tsv",
+                    output_cb=self._assertEqual_cb(output))
+
+
+class TestLinkedInstallHoldRelax(TestPkgLinked):
+        """Class to test automatic install-hold relaxing of constrained
+        packages when doing different packaging operations.
+
+        When performing packaging operations, any package that has an install
+        hold, but also has dependency on itself in its parent, must have that
+        install hold relaxed if we expect to be able to bring the image in
+        sync with its parent."""
+
+        # the "common" package exists because the solver ignores
+        # install-holds unless the package containing them depends on a
+        # specific version of another package.  so all our packages depend on
+        # the "common" package.
+        p_common = """
+            open common@1,5.11-0
+            close"""
+        p_A_template = """
+            open A@%(ver)d,5.11-0
+            add set name=pkg.depend.install-hold value=A
+            add depend type=require fmri=common
+            add depend type=incorporate fmri=common@1
+            close"""
+        p_B_template = """
+            open B@%(ver)d,5.11-0
+            add set name=variant.foo value=bar value=baz
+            add set name=pkg.depend.install-hold value=B
+            add depend type=parent fmri=feature/package/dependency/self \
+                variant.foo=bar
+            add depend type=require fmri=common
+            add depend type=incorporate fmri=common@1
+            close"""
+        p_C_template = """
+            open C@%(ver)d,5.11-0
+            add set name=pkg.depend.install-hold value=C
+            add depend type=require fmri=common
+            add depend type=incorporate fmri=common@1
+            close"""
+        p_BB_template = """
+            open BB@%(ver)d,5.11-0
+            add depend type=require fmri=B
+            add depend type=incorporate fmri=B@%(ver)d
+            close"""
+        p_BC_template = """
+            open BC@%(ver)d,5.11-0
+            add depend type=require fmri=B
+            add depend type=incorporate fmri=B@%(ver)d
+            add depend type=require fmri=C
+            add depend type=incorporate fmri=C@%(ver)d
+            close"""
+
+        p_data_template = [
+            p_A_template,
+            p_B_template,
+            p_C_template,
+            p_BB_template,
+            p_BC_template,
+        ]
+        p_data = [p_common]
+        for i in range(4):
+                for j in p_data_template:
+                        p_data.append(j % {"ver": (i + 1)})
+        p_fmri = {}
+
+        def setUp(self):
+                self.i_count = 2
+                pkg5unittest.ManyDepotTestCase.setUp(self, ["test"],
+                    image_count=self.i_count)
+
+                # get repo url
+                self.rurl1 = self.dcs[1].get_repo_url()
+
+                # populate repository
+                for p in self.p_data:
+                        fmristr = self.pkgsend_bulk(self.rurl1, p)[0]
+                        f = fmri.PkgFmri(fmristr, "5.11")
+                        pkgstr = "%s@%s" % (f.pkg_name, f.version.release)
+                        self.p_fmri[pkgstr] = fmristr
+
+                # setup image names and paths
+                self.i_name = []
+                self.i_path = []
+                self.i_api = []
+                self.i_api_reset = []
+                for i in range(self.i_count):
+                        name = "system:img%d" % i
+                        self.i_name.insert(i, name)
+                        self.i_path.insert(i, self.img_path(i))
+
+        def __test_linked_install_hold_relax(self, child_pkgs, op, op_args,
+            op_rv=EXIT_OK, variant_out_parent_dep=False, **kwargs):
+                """Verify that all install-holds get relaxed during
+                sync-linked operations."""
+
+                # create parent (0), and pull child (1)
+                self._imgs_create(2)
+
+                # install B@2 in the parent
+                self._pkg([0], "install -v B@2")
+
+                # install A@1 and B@1 in the child
+                self._pkg([1], "install -v %s" % child_pkgs)
+
+                # the parent dependency only exists under variant.foo=bar, if
+                # we change variant.foo the parent dependency should go away.
+                if variant_out_parent_dep:
+                        self._pkg([1], "change-variant variant.foo=baz")
+
+                # link the two images without syncing packages
+                self._attach_parent([1], 0, args="--linked-md-only")
+
+                if variant_out_parent_dep:
+                        # verify the child is synced
+                        self._pkg([1], "audit-linked", rv=EXIT_OK)
+                else:
+                        # verify the child is diverged
+                        self._pkg([1], "audit-linked", rv=EXIT_DIVERGED)
+
+                # execute op
+                def output_cb(output):
+                        if op_rv == EXIT_OK:
+                                self.assertEqualParsable(output, **kwargs)
+                self._pkg([1], "%s --parsable=0 %s" % (op, op_args),
+                    rv=op_rv, output_cb=output_cb)
+
+        def test_linked_install_hold_relax_all(self):
+                """Verify that all install-holds get relaxed during
+                sync-linked operations."""
+
+                # verify that sync-linked operation relaxes the install-hold
+                # in B and syncs it.
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1", "sync-linked", "",
+                    change_packages=[
+                        [self.p_fmri["B@1"], self.p_fmri["B@2"]]])
+
+                # if we remove the parent dependency in B it should no longer
+                # change during sync-linked operation.
+                self.__test_linked_install_hold_relax(
+                    "BC@1", "sync-linked", "", op_rv=EXIT_NOP,
+                    variant_out_parent_dep=True)
+
+        def test_linked_install_hold_relax_constrained_1(self):
+                """Verify that any install-holds which are associated with
+                constrained packages (ie, packages with parent dependencies)
+                get relaxed during install, uninstall and similar
+                operations.
+
+                In our child image we'll install 3 packages, A, B, C, all at
+                version 1.  pkg A, B, and C, all have install holds.  pkg B
+                has a parent dependency and is out of sync.
+
+                We will modify the child image without touching pkg B directly
+                and then verify that the install hold in B gets relaxed,
+                thereby allowing the image to be synced."""
+
+                # verify install
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1 C@1", "install", "A@2",
+                    change_packages=[
+                        [self.p_fmri["A@1"], self.p_fmri["A@2"]],
+                        [self.p_fmri["B@1"], self.p_fmri["B@2"]]])
+
+                # verify update pkg
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1 C@1", "update", "A@2",
+                    change_packages=[
+                        [self.p_fmri["A@1"], self.p_fmri["A@2"]],
+                        [self.p_fmri["B@1"], self.p_fmri["B@2"]]])
+
+                # verify uninstall
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1 C@1", "uninstall", "A@1",
+                    remove_packages=[
+                        self.p_fmri["A@1"]],
+                    change_packages=[
+                        [self.p_fmri["B@1"], self.p_fmri["B@2"]]])
+
+                # verify change-variant
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1 C@1", "change-variant", "variant.haha=hoho",
+                    change_variants=[
+                        ['variant.haha', 'hoho']],
+                    change_packages=[
+                        [self.p_fmri["B@1"], self.p_fmri["B@2"]]])
+
+                # verify change-facet
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1 C@1", "change-facet", "facet.haha=False",
+                    change_facets=[
+                        ['facet.haha', False, None, 'local', False, False]],
+                    change_packages=[
+                        [self.p_fmri["B@1"], self.p_fmri["B@2"]]])
+
+        def test_linked_install_hold_relax_constrained_2(self):
+                """Verify that any install-holds which are not associated with
+                constrained packages (ie, packages with parent dependencies)
+                don't get relaxed during install, uninstall and similar
+                operations.
+
+                In our child image we'll install 4 packages, A, B, C, and BC,
+                all at version 1.  pkg A, B, and C, all have install holds.
+                pkg B has a parent dependency and is out of sync.  pkg BC
+                incorporates B and C and links their versions together.
+
+                The child image is out of sync.  We should be able to
+                manipulate it, but we won't be able to bring it in sync
+                because of the install hold in C."""
+
+                # verify install
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1 C@1 BC@1", "install", "A@2",
+                    change_packages=[
+                        [self.p_fmri["A@1"], self.p_fmri["A@2"]]])
+
+                # verify update pkg
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1 C@1 BC@1", "update", "A@2",
+                    change_packages=[
+                        [self.p_fmri["A@1"], self.p_fmri["A@2"]]])
+
+                # verify uninstall
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1 C@1 BC@1", "uninstall", "A@1",
+                    remove_packages=[
+                        self.p_fmri["A@1"]])
+
+                # verify change-variant
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1 C@1 BC@1", "change-variant", "variant.haha=hoho",
+                    change_variants=[
+                        ['variant.haha', 'hoho']])
+
+                # verify change-facet
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1 C@1 BC@1", "change-facet", "facet.haha=False",
+                    change_facets=[
+                        ['facet.haha', False, None, 'local', False, False]])
+
+        def test_linked_install_hold_relax_constrained_3(self):
+                """Verify that any install-holds which are not associated with
+                constrained packages (ie, packages with parent dependencies)
+                don't get relaxed during install, uninstall and similar
+                operations.
+
+                In our child image we'll install 4 packages, A, B, C, and BC,
+                all at version 1.  pkg A, B, and C, all have install holds.
+                pkg B has a parent dependency and is out of sync.  pkg BC
+                incorporates B and C and links their versions together.
+
+                We'll try to update BC, which should fail because of the
+                install hold in C."""
+
+                # verify install
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1 C@1 BC@1", "install", "BC@2", op_rv=EXIT_OOPS)
+
+                # verify update pkg
+                self.__test_linked_install_hold_relax(
+                    "A@1 B@1 C@1 BC@1", "update", "BC@2", op_rv=EXIT_OOPS)
+
+        def test_linked_install_hold_relax_constrained_4(self):
+                """Verify that any install-holds which are not associated with
+                constrained packages (ie, packages with parent dependencies)
+                don't get relaxed during install, uninstall and similar
+                operations.
+
+                In our child image we'll install 1 package, B@1.  pkg B has an
+                install hold and a parent dependency, but its parent
+                dependency is disabled by a variant, so the image is in sync.
+
+                We'll try to install package BC@2, which should fail because
+                of the install hold in B."""
+
+                # verify install
+                self.__test_linked_install_hold_relax(
+                    "B@1", "install", "BC@2", op_rv=EXIT_OOPS,
+                    variant_out_parent_dep=True)
+
 
 class TestPkgLinkedScale(pkg5unittest.ManyDepotTestCase):
         """Test the scalability of the linked image subsystem."""
@@ -1568,8 +3159,8 @@
         ]
 
         # generate packages that do need to be synced
-        p_sunc1_name_gen = "sync1"
-        pkgs = [p_sunc1_name_gen + ver for ver in p_vers]
+        p_sync1_name_gen = "sync1"
+        pkgs = [p_sync1_name_gen + ver for ver in p_vers]
         p_sync1_name = dict(zip(range(len(pkgs)), pkgs))
         for i in p_sync1_name:
                 p_data = "open %s\n" % p_sync1_name[i]
@@ -1667,5 +3258,6 @@
                 # update the parent image and all child images in parallel
                 self.pkg("update -C0 -q")
 
+
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/cli/t_pkg_varcet.py	Mon Aug 05 11:24:42 2013 -0700
+++ b/src/tests/cli/t_pkg_varcet.py	Wed Aug 07 15:30:45 2013 -0700
@@ -37,6 +37,7 @@
 import tempfile
 import unittest
 
+from pkg.client.pkgdefs import EXIT_OOPS
 
 class TestPkgVarcet(pkg5unittest.SingleDepotTestCase):
 
@@ -77,6 +78,11 @@
             add file tmp/non-debug path=usr/bin/foo mode=0755 owner=root group=root variant.unknown=foo
             close """
 
+        pkg_need_foo = """
+            open [email protected]
+            add depend type=require fmri=foo
+            """
+
         misc_files = ["tmp/debug", "tmp/non-debug", "tmp/neapolitan",
             "tmp/strawberry", "tmp/doc", "tmp/man", "tmp/pdf"]
 
@@ -168,10 +174,10 @@
 
                 # Verify output for no options and no patterns.
                 exp_def = """\
-facet.doc.* False
-facet.doc.html False
-facet.doc.man False
-facet.doc.txt True
+facet.doc.* False local
+facet.doc.html False local
+facet.doc.man False local
+facet.doc.txt True local
 """
                 self.__assert_facet_matches(exp_def)
 
@@ -181,28 +187,28 @@
 
                 # Matched case for explicitly set.
                 exp_def = """\
-facet.doc.* False
-facet.doc.txt True
+facet.doc.* False local
+facet.doc.txt True local
 """
                 names = ("'facet.doc.[*]'", "doc.txt")
                 self.__assert_facet_matches(exp_def, names=names)
 
                 # Verify -a output.
                 exp_def = """\
-facet.doc.* False
-facet.doc.html False
-facet.doc.man False
-facet.doc.pdf False
-facet.doc.txt True
+facet.doc.* False local
+facet.doc.html False local
+facet.doc.man False local
+facet.doc.pdf False local
+facet.doc.txt True local
 """
                 opts = ("-a",)
                 self.__assert_facet_matches(exp_def, opts=opts)
 
                 # Matched case for explicitly set and those in packages.
                 exp_def = """\
-facet.doc.* False
-facet.doc.pdf False
-facet.doc.txt True
+facet.doc.* False local
+facet.doc.pdf False local
+facet.doc.txt True local
 """
                 names = ("'facet.doc.[*]'", "*pdf", "facet.doc.txt")
                 opts = ("-a",)
@@ -210,9 +216,9 @@
 
                 # Verify -i output.
                 exp_def = """\
-facet.doc.man False
-facet.doc.pdf False
-facet.doc.txt True
+facet.doc.man False local
+facet.doc.pdf False local
+facet.doc.txt True local
 """
                 opts = ("-i",)
                 self.__assert_facet_matches(exp_def, opts=opts)
@@ -223,15 +229,15 @@
 
                 # Matched case in packages.
                 exp_def = """\
-facet.doc.man False
-facet.doc.pdf False
+facet.doc.man False local
+facet.doc.pdf False local
 """
                 names = ("'facet.*[!t]'",)
                 opts = ("-i",)
                 self.__assert_facet_matches(exp_def, opts=opts, names=names)
 
                 exp_def = """\
-facet.doc.pdf False
+facet.doc.pdf False local
 """
                 names = ("'*pdf'",)
                 opts = ("-i",)
@@ -242,10 +248,10 @@
                 self.pkg("uninstall foo")
 
                 exp_def = """\
-facet.doc.* False
-facet.doc.html False
-facet.doc.man False
-facet.doc.txt True
+facet.doc.* False local
+facet.doc.html False local
+facet.doc.man False local
+facet.doc.txt True local
 """
 
                 # Output should be the same for both -a and default cases with
@@ -300,10 +306,10 @@
                     "facet.doc.html=False facet.doc.txt=True")
 
                 exp_def = """\
-facet.doc.* False
-facet.doc.html False
-facet.doc.man False
-facet.doc.txt True
+facet.doc.* False local
+facet.doc.html False local
+facet.doc.man False local
+facet.doc.txt True local
 """
 
                 # Output should be the same for both -a and default cases with
@@ -318,10 +324,10 @@
 
                 # Verify output for no options and no patterns.
                 exp_def = """\
-facet.doc.* False
-facet.doc.html False
-facet.doc.man False
-facet.doc.txt True
+facet.doc.* False local
+facet.doc.html False local
+facet.doc.man False local
+facet.doc.txt True local
 """
                 self.__assert_facet_matches(exp_def)
 
@@ -586,6 +592,41 @@
 """ % variants
                         self.__assert_variant_matches(exp_def, opts=opts)
 
+        def test_02_varcet_reject(self):
+                """Verify that if we try to --reject packages that should get
+                removed, we invoke the solver."""
+
+                # create an image
+                variants = { "variant.icecream": "strawberry" }
+                self.image_create(self.rurl, variants=variants)
+
+                # install a package with a dependency
+                self.pkg("install [email protected]")
+
+                # Set some facets/variant while rejecting a random package.
+                self.pkg("change-facet --reject=nothing "
+                    "facet.doc.txt=True")
+                self.pkg("change-variant --reject=nothing "
+                    "variant.icecream=neapolitan")
+
+                # Reset the facets/variant to the same value (which would
+                # normally be a noop) while rejecting a package and make sure
+                # that package gets uninstalled.
+                self.pkg("change-facet --reject=need_foo "
+                    "facet.doc.txt=True")
+                self.pkg("install [email protected]")
+                self.pkg("change-variant --reject=need_foo "
+                    "variant.icecream=neapolitan")
+                self.pkg("install [email protected]")
+
+                # Reset the facets/variant to the same value (which would
+                # normally be a noop) while rejecting a package we can't
+                # uninstall due to dependencies.  Make sure this fails.
+                self.pkg("change-facet --reject=foo "
+                    "facet.doc.txt=True", exit=EXIT_OOPS)
+                self.pkg("change-variant --reject=foo "
+                    "variant.icecream=neapolitan", exit=EXIT_OOPS)
+
 
 if __name__ == "__main__":
         unittest.main()