16297554 pkg history fails when time travelling
author thejaswini.k@oracle.com
date Fri, 11 Oct 2013 16:09:34 +0100
branch s11-update
changeset 2966:6a97d8a803d3
tag s11u2b26
parent 2960:31f0f7c8a805
child 2972:6de5f42d5211
16297554 pkg history fails when time travelling
15779714 pkgdepend usage statement missing some resolve options
16462757 pkgdepend needs to support Python 3
17355511 pkg info displays human-version even when identical to actual version
17336089 modules/server/repository.py is missing a module import
16603436 pkglint should warn for facet values other than true or all
17466879 mirror service instances don't cope with more than one publisher
16941886 pkg.depot-config needs to take more care when destroying old configurations
16922545 build failure causes pkg/depot user interface to die
15776364 pkg search -r /usr/bin/* causes a traceback
15433013 content hash handling should handle different hash functions
17477111 resync manpages from docs group
17562108 resync manpages from docs group
src/client.py
src/depot-config.py
src/depot.py
src/man/packagemanager.1
src/man/pkg.1
src/man/pkg.5
src/man/pkg.depot-config.1m
src/man/pkg.depotd.1m
src/man/pkg.sysrepo.1m
src/man/pkgdepend.1
src/man/pkgdiff.1
src/man/pkgfmt.1
src/man/pkglint.1
src/man/pkgmerge.1
src/man/pkgmogrify.1
src/man/pkgrecv.1
src/man/pkgrepo.1
src/man/pkgsend.1
src/man/pkgsign.1
src/man/pkgsurf.1
src/man/pm-updatemanager.1
src/modules/actions/file.py
src/modules/actions/generic.py
src/modules/actions/license.py
src/modules/actions/signature.py
src/modules/catalog.py
src/modules/client/api.py
src/modules/client/api_errors.py
src/modules/client/image.py
src/modules/client/imageplan.py
src/modules/client/publisher.py
src/modules/client/query_parser.py
src/modules/client/transport/transport.py
src/modules/depotcontroller.py
src/modules/digest.py
src/modules/elf.c
src/modules/elfextract.c
src/modules/elfextract.h
src/modules/flavor/base.py
src/modules/flavor/depthlimitedmf.py
src/modules/flavor/depthlimitedmf27.py
src/modules/flavor/elf.py
src/modules/flavor/python.py
src/modules/lint/pkglint_action.py
src/modules/lint/pkglint_manifest.py
src/modules/manifest.py
src/modules/misc.py
src/modules/p5p.py
src/modules/p5s.py
src/modules/query_parser.py
src/modules/search_storage.py
src/modules/server/api.py
src/modules/server/depot.py
src/modules/server/query_parser.py
src/modules/server/repository.py
src/modules/server/transaction.py
src/pkg/manifests/package:pkg.p5m
src/pkgdep.py
src/pkgrepo.py
src/publish.py
src/pull.py
src/sign.py
src/svc/svc-pkg-mirror
src/sysrepo.py
src/tests/api/t_api_search.py
src/tests/api/t_elf.py
src/tests/api/t_manifest.py
src/tests/api/t_p5p.py
src/tests/api/t_pkglint.py
src/tests/cli/t_depot_config.py
src/tests/cli/t_https.py
src/tests/cli/t_pkg_info.py
src/tests/cli/t_pkg_install.py
src/tests/cli/t_pkg_publisher.py
src/tests/cli/t_pkg_refresh.py
src/tests/cli/t_pkg_revert.py
src/tests/cli/t_pkg_search.py
src/tests/cli/t_pkg_sysrepo.py
src/tests/cli/t_pkgdep.py
src/tests/cli/t_pkgrecv.py
src/tests/cli/t_pkgrepo.py
src/tests/cli/t_pkgsend.py
src/tests/cli/t_pkgsign.py
src/tests/cli/t_pkgsurf.py
src/tests/cli/t_sysrepo.py
src/tests/pkg5unittest.py
src/util/apache2/depot/depot_index.py
src/web/en/search.shtml
--- a/src/client.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/client.py	Fri Oct 11 16:09:34 2013 +0100
@@ -624,18 +624,18 @@
                         not_installed = []
                         try:
                                 for entry in api_inst.get_pkg_list(
-				    api.ImageInterface.LIST_INSTALLED,
+                                    api.ImageInterface.LIST_INSTALLED,
                                     patterns=e.notfound, raise_unmatched=True):
                                         pub, stem, ver = entry[0]
                                         no_updates.append(stem)
                         except api_errors.InventoryException, exc:
                                 not_installed = exc.notfound
 
-			err_str = ""
+                        err_str = ""
                         if len(not_installed) == 1:
-				err_str = _("No package matching '%s'"
-					  " is installed. ") % \
-					  not_installed[0]
+                                err_str = _("No package matching '%s'"
+                                    " is installed. ") % \
+                                    not_installed[0]
                         elif not_installed:
                                 err_str = _("No packages matching '%s'"
                                     " are installed. ") % \
@@ -649,8 +649,8 @@
                                 err_str = err_str + _("No updates are available"
                                           " for packages '%s'.") % \
                                             ", ".join(no_updates)
-			if err_str:
-                                error(err_str, cmd=op) 
+                        if err_str:
+                                error(err_str, cmd=op)
 
                 if found and e.notfound:
                         # Only some patterns matched.
@@ -1479,7 +1479,7 @@
 
                 try:
                         salvaged = api_inst.describe().salvaged
-			newbe = api_inst.describe().new_be
+                        newbe = api_inst.describe().new_be
                         if salvaged and (rval == EXIT_OK or not newbe):
                                 # Only show salvaged file list if populated
                                 # and operation was successful, or if operation
@@ -2708,6 +2708,9 @@
         except api_errors.BooleanQueryException, e:
                 error(e)
                 return EXIT_OOPS
+        except api_errors.ParseError, e:
+                error(e)
+                return EXIT_OOPS
 
         good_res = False
         bad_res = False
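
This added except clause is the fix for 15776364: a query such as pkg search -r /usr/bin/* could raise api_errors.ParseError out of the result loop and surface as a traceback. A minimal standalone sketch of the pattern, with a hypothetical parse_query() standing in for the client's query parser:

EXIT_OOPS = 1

class ParseError(Exception):
        """Stand-in for pkg.client.api_errors.ParseError."""

def parse_query(query):
        # Hypothetical parser: reject queries with unbalanced quotes.
        if query.count('"') % 2:
                raise ParseError("unbalanced quotation marks in %r" % query)
        return query.split()

def do_search(query):
        try:
                terms = parse_query(query)
        except ParseError as e:
                print("pkg: %s" % e)    # an error message, not a traceback
                return EXIT_OOPS
        return terms
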
@@ -2997,7 +3000,7 @@
                 # XXX even more info on the publisher would be nice?
                 msg(_("     Publisher:"), pi.publisher)
                 hum_ver = pi.get_attr_values("pkg.human-version")
-                if hum_ver and hum_ver[0] != pi.version:
+                if hum_ver and hum_ver[0] != str(pi.version):
                         msg(_("       Version:"), "%s (%s)" %
                             (pi.version, hum_ver[0]))
                 else:
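
The str() coercion above is the fix for 17355511: pi.version is a version object, so comparing the human-version string against it directly was never equal, and an identical human-version was still printed in parentheses. A small sketch of the failure mode, with a hypothetical Version class standing in for pkg's version object:

class Version(object):
        def __init__(self, text):
                self.text = text
        def __str__(self):
                return self.text

version = Version("1.0.11")
hum_ver = ["1.0.11"]

print(hum_ver[0] != version)        # True: string vs. object, always unequal
print(hum_ver[0] != str(version))   # False: identical text, so omit it
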
@@ -3753,9 +3756,9 @@
                                 remove_mirrors.add(misc.parse_uri(arg,
                                     cwd=orig_cwd))
                 elif opt == "-p":
-			if repo_uri:
+                        if repo_uri:
                                 usage(_("The -p option can be specified only "
-				    "once."), cmd=cmd_name) 
+                                    "once."), cmd=cmd_name)
                         repo_uri = misc.parse_uri(arg, cwd=orig_cwd)
                 elif opt in ("-P", "--search-first"):
                         search_first = True
@@ -5288,9 +5291,9 @@
 
         for opt, arg in opts:
                 if opt in ("-p", "--publisher"):
-			if pub_url:
+                        if pub_url:
                                 usage(_("The -p option can be specified only "
-				    "once."), cmd=cmd_name)
+                                    "once."), cmd=cmd_name)
                         try:
                                 pub_name, pub_url = arg.split("=", 1)
                         except ValueError:
@@ -5623,11 +5626,8 @@
                 dt_start = misc.timestamp_to_datetime(he.operation_start_time)
                 dt_end = misc.timestamp_to_datetime(he.operation_end_time)
                 if dt_start > dt_end:
-                        error(_("History operation appeared to end before it "
-                            "started.  Start time: %(start_time)s, "
-                            "End time: %(end_time)s") %
-                            (output["start"], output["finish"]), cmd="history")
-                        return EXIT_OOPS
+                        output["finish"] = _("%s (clock drift detected)") % \
+                            output["finish"]
 
                 output["time"] = dt_end - dt_start
                 # This should never happen.  We can't use timedelta's str()
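
This hunk is the fix for 16297554 itself: instead of aborting pkg history output when an operation's recorded end time precedes its start time (for example, after the system clock was stepped backwards), the finish timestamp is annotated and processing continues. A simplified sketch of the new behaviour, with hypothetical field handling:

import datetime

def format_history_entry(dt_start, dt_end):
        output = {
            "start": dt_start.isoformat(),
            "finish": dt_end.isoformat(),
        }
        if dt_start > dt_end:
                # Tolerate clock drift rather than failing the command.
                output["finish"] = "%s (clock drift detected)" % \
                    output["finish"]
        output["time"] = dt_end - dt_start
        return output

# An operation recorded across a backwards clock step:
print(format_history_entry(
    datetime.datetime(2013, 10, 11, 12, 0, 5),
    datetime.datetime(2013, 10, 11, 12, 0, 0)))
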
@@ -6092,6 +6092,10 @@
                 elif opt in ("--help", "-?"):
                         show_usage = True
 
+        # The globals in pkg.digest can be influenced by debug flags
+        if DebugValues:
+                reload(pkg.digest)
+
         subcommand = None
         if pargs:
                 subcommand = pargs.pop(0)
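
The reload() supports 15433013: pkg.digest establishes its hash-algorithm preferences at import time, so hash= debug flags that reach DebugValues after import would otherwise be ignored. A simplified stand-in for that import-time computation (the real module derives much more state than this):

import hashlib

DebugValues = {}

def _configure_hashes():
        # pkg.digest does equivalent work at import time; rerunning it
        # is what the reload(pkg.digest) call above achieves.
        names = DebugValues.get("hash", "sha1").split("+")
        return [getattr(hashlib, n) for n in names]

HASH_ALGS = _configure_hashes()
print(len(HASH_ALGS))                  # 1: import-time default is SHA-1

DebugValues["hash"] = "sha1+sha256"    # set from a --debug flag
HASH_ALGS = _configure_hashes()        # the equivalent of the reload
print(len(HASH_ALGS))                  # 2: SHA-1 and SHA-256
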
--- a/src/depot-config.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/depot-config.py	Fri Oct 11 16:09:34 2013 +0100
@@ -356,13 +356,14 @@
                 raise DepotException(
                     _("Unable to write publisher response: %s") % err)
 
-def cleanup_conf(runtime_dir=None):
-        """Destroys an old configuration."""
+def cleanup_htdocs(htdocs_dir):
+        """Destroy any existing "htdocs" directory."""
         try:
-                shutil.rmtree(runtime_dir, ignore_errors=True)
+                shutil.rmtree(htdocs_dir, ignore_errors=True)
         except OSError, err:
                 raise DepotException(
-                    _("Unable to cleanup old configuration: %s") % err)
+                    _("Unable to remove an existing 'htdocs' directory "
+                    "in the runtime directory: %s") % err)
 
 def refresh_conf(repo_info, log_dir, host, port, runtime_dir,
             template_dir, cache_dir, cache_size, sroot, fragment=False,
@@ -370,12 +371,12 @@
         """Creates a new configuration for the depot."""
         try:
                 ret = EXIT_OK
-                cleanup_conf(runtime_dir=runtime_dir)
                 if not repo_info:
                         raise DepotException(_("no repositories found"))
 
                 htdocs_path = os.path.join(runtime_dir, DEPOT_HTDOCS_DIRNAME,
                     sroot)
+                cleanup_htdocs(htdocs_path)
                 misc.makedirs(htdocs_path)
 
                 # pubs and default_pubs are lists of tuples of the form:
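
This is the fix for 16941886: the old cleanup_conf() removed the whole runtime directory before regenerating configuration, destroying logs and caches along with it. The new code confines destruction to the generated htdocs tree. A sketch of the narrowed pattern, with hypothetical directory names:

import os
import shutil

def refresh(runtime_dir):
        # Only the generated "htdocs" tree is destroyed; logs, caches
        # and anything else under runtime_dir are left untouched.
        htdocs_path = os.path.join(runtime_dir, "htdocs")
        shutil.rmtree(htdocs_path, ignore_errors=True)
        os.makedirs(htdocs_path)
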
--- a/src/depot.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/depot.py	Fri Oct 11 16:09:34 2013 +0100
@@ -89,6 +89,9 @@
 from cherrypy.process.plugins import Daemonizer
 
 from pkg.misc import msg, emsg, setlocale
+from pkg.client.debugvalues import DebugValues
+
+import pkg
 import pkg.client.api_errors as api_errors
 import pkg.config as cfg
 import pkg.portable.util as os_util
@@ -164,7 +167,8 @@
                         operations, simply 'search'.
         --debug         The name of a debug feature to enable; or a whitespace
                         or comma separated list of features to enable.
-                        Possible values are: headers.
+                        Possible values are: headers, hash=sha1+sha256,
+                        hash=sha256
         --image-root    The path to the image whose file information will be
                         used as a cache for file data.
         --log-access    The destination for any access related information
@@ -314,6 +318,16 @@
                                 else:
                                         features = arg.split()
                                 debug_features.extend(features)
+
+                                # We also allow key=value debug flags, which
+                                # get set in pkg.client.debugvalues
+                                for feature in features:
+                                        try:
+                                                key, val = feature.split("=", 1)
+                                                DebugValues.set_value(key, val)
+                                        except (AttributeError, ValueError):
+                                                pass
+
                         elif opt == "--disable-ops":
                                 if arg is None or arg == "":
                                         raise OptionError, \
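
With this hunk, --debug accepts key=value pairs such as hash=sha1+sha256 (see 15433013) alongside bare feature names such as headers, mirroring the pairs into pkg.client.debugvalues. A standalone sketch of the split logic, with a plain dict standing in for DebugValues:

def parse_debug_arg(arg, debug_features, debug_values):
        # Accept a comma- or whitespace-separated feature list.
        if "," in arg:
                features = arg.split(",")
        else:
                features = arg.split()
        debug_features.extend(features)
        for feature in features:
                # key=value pairs also become client debug values;
                # bare names such as "headers" are left as-is.
                if "=" in feature:
                        key, val = feature.split("=", 1)
                        debug_values[key] = val

features, values = [], {}
parse_debug_arg("headers,hash=sha1+sha256", features, values)
print(features)     # ['headers', 'hash=sha1+sha256']
print(values)       # {'hash': 'sha1+sha256'}
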
@@ -485,6 +499,9 @@
                 if addresses:
                         ivalues["pkg"]["address"] = list(addresses)
 
+                if DebugValues:
+                        reload(pkg.digest)
+
                 # Build configuration object.
                 dconf = ds.DepotConfig(target=user_cfg, overrides=ivalues)
         except getopt.GetoptError, _e:
--- a/src/man/packagemanager.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/packagemanager.1	Fri Oct 11 16:09:34 2013 +0100
@@ -1,6 +1,6 @@
 '\" te
-.\" Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
-.TH packagemanager 1 "27 May 2012" "SunOS 5.11" "User Commands"
+.\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
+.TH packagemanager 1 "21 May 2013" "SunOS 5.11" "User Commands"
 .SH NAME
 packagemanager \- GUI for the Image Packaging System
 .SH SYNOPSIS
@@ -281,7 +281,7 @@
 Package Manager online help
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
 .SH NOTES
 .sp
 .LP
--- a/src/man/pkg.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkg.1	Fri Oct 11 16:09:34 2013 +0100
@@ -1,6 +1,6 @@
 '\" te
 .\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
-.TH pkg 1 "19 Apr 2013" "SunOS 5.11" "User Commands"
+.TH pkg 1 "04 Oct 2013" "SunOS 5.11" "User Commands"
 .SH NAME
 pkg \- Image Packaging System retrieval client
 .SH SYNOPSIS
@@ -209,7 +209,7 @@
     [--set-property \fIname_of_property\fR=\fIvalue\fR]
     [--add-property-value \fIname_of_property\fR=\fIvalue_to_add\fR]
     [--remove-property-value \fIname_of_property\fR=\fIvalue_to_remove\fR]
-    [--unset-property \fIname_of_property_to_delete\fR] 
+    [--unset-property \fIname_of_property_to_delete\fR]
     [--proxy \fIproxy_to_use\fR] \fIpublisher\fR
 .fi
 
@@ -319,10 +319,6 @@
 .na
 \fB\fB-R\fR \fIdir\fR\fR
 .ad
-.br
-.na
-\fB\fB--image-dir\fR \fIdir\fR\fR
-.ad
 .sp .6
 .RS 4n
 Operate on the image rooted at \fIdir\fR. If no directory was specified or determined based on environment, the default is /. See the "Environment Variables" section for more information.
@@ -715,7 +711,7 @@
 .ad
 .sp .6
 .RS 4n
-List installed packages and the newest version of packages that are available for installation. Packages are considered to be available for installation if they are allowed by the installed incorporations and by the image's variants. If one or more patterns are specified, then the newest version matching the specified pattern and allowed by any installed incorporations and the image's variants is listed. Without \fB-a\fR, list only installed packages.
+List installed packages and list the newest version of packages that are not installed but could be installed in this image. Packages can be installed if they are allowed by the installed incorporations and by the image's variants. If one or more patterns are specified, then the newest version matching the specified pattern and allowed by any installed incorporations and the image's variants is listed. Without \fB-a\fR, list only installed packages.
 .RE
 
 .sp
@@ -759,7 +755,7 @@
 .ad
 .sp .6
 .RS 4n
-Do not list any packages, but return failure if there are any fatal errors.
+Do not list any packages, but return failure if a fatal error occurs.
 .RE
 
 .sp
@@ -770,7 +766,7 @@
 .ad
 .sp .6
 .RS 4n
-Display a one-line short-form giving the package name and summary. This option can be used with \fB-a\fR, \fB-n\fR, \fB-u\fR or \fB-v\fR.
+Display a one-line short-form giving the package name and summary. This option can be used with \fB-a\fR, \fB-n\fR, or \fB-u\fR.
 .RE
 
 .sp
@@ -781,7 +777,7 @@
 .ad
 .sp .6
 .RS 4n
-List only packages with newer versions available. This option cannot be used with \fB-g\fR.
+List installed packages that have newer versions available. This option cannot be used with \fB-g\fR.
 .RE
 
 .sp
@@ -812,7 +808,7 @@
 .ne 2
 .mk
 .na
-\fB\fBpkg info\fR [\fB-lr\fR] [\fB-g\fR \fIpath_or_uri\fR]... [\fB--license\fR] [\fIpkg_fmri_pattern\fR ...]\fR
+\fB\fBpkg info\fR [\fB-lqr\fR] [\fB-g\fR \fIpath_or_uri\fR]... [\fB--license\fR] [\fIpkg_fmri_pattern\fR ...]\fR
 .ad
 .sp .6
 .RS 4n
@@ -858,7 +854,7 @@
 .ad
 .sp .6
 .RS 4n
-Do not display any package information, but return failure if there are any fatal errors.
+Do not display any package information, but return failure if a fatal error occurs.
 .RE
 
 .sp
@@ -880,7 +876,7 @@
 .ad
 .sp .6
 .RS 4n
-Display the license texts for the packages. This option can be combined with \fB-l\fR, \fB-q\f, or R\fB-r\fR. When all patterns match a known package and have licenses, the command will return success. If one or more patterns are unmatched or match packages that do not have licenses, the command will return failure.
+Display the license texts for the packages. This option can be combined with \fB-l\fR, \fB-q\fR, or \fB-r\fR. Return success if all \fIpkg_fmri_pattern\fR patterns match known packages and have licenses. Return failure if one or more patterns are unmatched or match packages that do not have licenses.
 .RE
 
 .RE
@@ -1095,7 +1091,7 @@
 .ad
 .sp .6
 .RS 4n
-Search for matches to the \fIquery\fR, and display the results. See the description of \fIquery\fR below.
+Search for actions that match \fIquery\fR, and display the matching search index, action name, action value, and package name. See the description of \fIquery\fR below. Some searches might yield duplicate results.
 .sp
 .ne 2
 .mk
@@ -1185,7 +1181,7 @@
 .ad
 .sp .6
 .RS 4n
-Control the columns of the results. The \fB-o\fR option can be specified multiple times, or multiple attributes can be specified as the argument to one \fB-o\fR option by separating the attribute names with commas. In addition to the pseudo attributes outlined above, the following attributes are defined for search results:
+Control the columns of the results. The \fB-o\fR option can be specified multiple times, or multiple attributes can be specified as the argument to one \fB-o\fR option by separating the attribute names with commas. In addition to the pseudo attributes outlined above, the following attributes are defined for search results. These attributes help show why a particular result was a match:
 .sp
 .ne 2
 .mk
@@ -1229,11 +1225,13 @@
 .ad
 .sp .6
 .RS 4n
-By default, \fIquery\fR is interpreted as a series of terms to be matched exactly. The \fB?\fR and \fB*\fR characters can be used as \fBglob\fR(3C)\(hystyle wildcards, allowing more flexible query matches.
-.sp
-In addition to simple token matching and wildcard search, a more complicated query language is supported. Phrases can be searched for by using single or double quotation marks (\fB\&'\fR or \fB"\fR). Be sure to take your shell into account so that \fBpkg\fR actually sees the \fB\&'\fR or \fB"\fR.
-.sp
-Boolean search using AND and OR is supported.
+By default, \fIquery\fR is interpreted as a series of terms to be matched exactly and multiple terms are ANDed.
+.sp
+AND and OR are supported.
+.sp
+The \fB?\fR and \fB*\fR characters can be used as \fBglob\fR(3C)\(hystyle wildcards, allowing more flexible query matches.
+.sp
+In addition to simple token matching and wildcard search, a more complicated query language is supported. Phrases can be searched for by using single or double quotation marks (\fB'\fR or \fB"\fR). Be sure to take your shell into account so that \fBpkg\fR actually sees the \fB'\fR or \fB"\fR.
 .sp
 Which tokens are indexed is action-dependent, but can include content hashes and path names. For information about actions and their attributes, see "Actions" in the \fBpkg\fR(5) man page. See also the list of pseudo attribute names in \fBpkg contents\fR and \fB-o\fR above.
 .sp
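
A toy illustration of the query semantics described above (multiple terms implicitly ANDed, ? and * as glob-style wildcards). This is not the client's implementation, just the stated matching rules applied to a tiny hand-built index:

import fnmatch

index = {
    "usr/bin/pkgrepo": "package/pkg",
    "usr/bin/pkg": "package/pkg",
    "usr/bin/ls": "file/ls",
}

def matches(term, tokens):
        # ? and * behave as glob(3C)-style wildcards.
        return any(fnmatch.fnmatchcase(t, term) for t in tokens)

query = ["usr/bin/*", "package*"]       # two terms, implicitly ANDed
hits = [path for path, pkg in index.items()
    if all(matches(term, [path, pkg]) for term in query)]
print(sorted(hits))                     # ['usr/bin/pkg', 'usr/bin/pkgrepo']
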
@@ -1347,7 +1345,7 @@
 .ad
 .sp .6
 .RS 4n
-Print nothing, but return failure if there are any fatal errors.
+Suppress progress messages and all other output during the requested operation.
 .RE
 
 .sp
@@ -1430,7 +1428,7 @@
 .ad
 .sp .6
 .RS 4n
-Revert all files tagged with \fItag-name\fR, and remove any unpackaged files or directories that are under directories with this tag and that match \fIpattern\fR. See the description of the \fBrevert-tag\fR attribute in "File Actions" and "Directory Actions" in the \fBpkg\fR(5) man page for more information about \fItag-name\fR and \fIpattern\fR.
+Revert all files tagged with \fItag-name\fR, and remove any unpackaged files or directories that are under directories with this tag and that match \fIpattern\fR. See the description of the \fBrevert-tag\fR attribute in "File Actions" and "Directory Actions" in the \fBpkg\fR(5) man page for more information about \fBtag-name\fR and \fIpattern\fR.
 .RE
 
 .sp
@@ -1482,7 +1480,7 @@
 .ne 2
 .mk
 .na
-\fB\fB-F\fR\fR
+\fB\fB-F\fR \fIformat\fR\fR
 .ad
 .sp .6
 .RS 4n
@@ -1596,7 +1594,7 @@
 .ne 2
 .mk
 .na
-\fB\fB-F\fR\fR
+\fB\fB-F\fR \fIformat\fR\fR
 .ad
 .sp .6
 .RS 4n
@@ -1622,7 +1620,7 @@
 .ad
 .sp .6
 .RS 4n
-Display all variants explicitly set in the image and all variants that are listed in installed packages. This option cannot be combined with \fB-i\fR.
+Display all variants explicitly set in the image and all variants that are listed in installed packages. The \fB-a\fR option cannot be combined with the \fB-i\fR option.
 .RE
 
 .sp
@@ -1633,7 +1631,7 @@
 .ad
 .sp .6
 .RS 4n
-Display all variants that are listed in installed packages. This option cannot be combined with \fB-a\fR.
+Display all variants that are listed in installed packages. The \fB-i\fR option cannot be combined with the \fB-a\fR option.
 .RE
 
 .sp
@@ -1644,7 +1642,7 @@
 .ad
 .sp .6
 .RS 4n
-Display the possible variant values that can be set for installed packages. This option can be combined with \fB-a\fR and \fB-i\fR.
+Display the possible variant values that can be set for installed packages. The \fB-v\fR option can be combined with the \fB-a\fR or \fB-i\fR option.
 .RE
 
 .RE
@@ -1672,7 +1670,7 @@
 .ad
 .sp .6
 .RS 4n
-Display the current values and source of all facets that have been explicitly set in this image by using the \fBpkg change-facet\fR command or inherited from a parent image. See "Facets and Variants" in the \fBpkg\fR(5) man page for more information about facets.
+Display the current values and source of all facets that have been explicitly set in this image by using the \fBpkg change-facet\fR command or that have been inherited from a parent image. See "Facets and Variants" in the \fBpkg\fR(5) man page for more information about facets.
 .sp
 .ne 2
 .mk
@@ -1688,7 +1686,7 @@
 .ne 2
 .mk
 .na
-\fB\fB-F\fR\fR
+\fB\fB-F\fR \fIformat\fR\fR
 .ad
 .sp .6
 .RS 4n
@@ -1714,7 +1712,7 @@
 .ad
 .sp .6
 .RS 4n
-Display all facets explicitly set in the image and all facets that are listed in installed packages. This option cannot be combined with \fB-i\fR.
+Display all facets explicitly set in the image and all facets that are listed in installed packages. The \fB-a\fR option cannot be combined with the \fB-i\fR option.
 .RE
 
 .sp
@@ -1725,7 +1723,7 @@
 .ad
 .sp .6
 .RS 4n
-Display all facets that are listed in installed packages. This option cannot be combined with \fB-a\fR.
+Display all facets that are listed in installed packages. The \fB-i\fR option cannot be combined with the \fB-a\fR option.
 .RE
 
 .sp
@@ -1736,7 +1734,7 @@
 .ad
 .sp .6
 .RS 4n
-Include masked facets in the output. Include a column indicating which (if any) facets are masked.
+Include masked facets in the output. Include a column that indicates which, if any, facets are masked.
 .RE
 
 .RE
@@ -1986,6 +1984,17 @@
 .ne 2
 .mk
 .na
+\fB\fB-F\fR \fIformat\fR\fR
+.ad
+.sp .6
+.RS 4n
+Specify an alternative output format. Currently, only \fBtsv\fR (Tab Separated Values) is valid.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
 \fB\fB-H\fR\fR
 .ad
 .sp .6
@@ -2015,17 +2024,6 @@
 Display only enabled publishers.
 .RE
 
-.sp
-.ne 2
-.mk
-.na
-\fB\fB-F\fR\fR
-.ad
-.sp .6
-.RS 4n
-Specify an alternative output format. Currently, only \fBtsv\fR (Tab Separated Values) is valid.
-.RE
-
 .RE
 
 .sp
@@ -2104,7 +2102,7 @@
 .ad
 .sp .6
 .RS 4n
-Add the specified certificate as a CA certificate that is trusted. The hashes of the PEM representation of the user-approved CA certificates are listed in the detailed output of the \fBpkg publisher\fR command.
+For verifying signed packages, add the specified certificate as a CA certificate that is trusted. The hashes of the PEM representation of the user-approved CA certificates are listed in the detailed output of the \fBpkg publisher\fR command.
 .RE
 
 .sp
@@ -2115,7 +2113,7 @@
 .ad
 .sp .6
 .RS 4n
-Treat the certificate with the given hash of its PEM representation as revoked. The hashes of the user-revoked CA certificates are listed in the detailed output of the \fBpkg publisher\fR command.
+For verifying signed packages, treat the certificate with the given hash of its PEM representation as revoked. The hashes of the user-revoked CA certificates are listed in the detailed output of the \fBpkg publisher\fR command.
 .RE
 
 .sp
@@ -2126,7 +2124,7 @@
 .ad
 .sp .6
 .RS 4n
-Remove the certificate with the given hash from the list of approved certificates and the list of revoked certificates.
+For verifying signed packages, remove the certificate with the given hash from the list of approved certificates and the list of revoked certificates.
 .RE
 
 .sp
@@ -2315,7 +2313,9 @@
 .ad
 .sp .6
 .RS 4n
-Use the specified web proxy URI to retrieve content for the specified origin (\fB-g\fR) or mirror (\fB-m\fR). The proxy value is stored as part of the publisher configuration. At run time, \fB$http_proxy\fR or related environment variables override this proxy setting. See the \fBcurl\fR(1) man page for the list of accepted environment variable names.
+Use the specified proxy URI to retrieve content for the specified origin (\fB-g\fR) or mirror (\fB-m\fR). The proxy value is stored as part of the publisher configuration, which means the system repository used by child images is automatically updated. This option cannot be used to set an authenticated proxy. The \fIproxy_to_use\fR value cannot have the form \fBprotocol://user:password@host\fR.
+.sp
+At run time, \fBhttp_proxy\fR or related environment variables override this proxy setting. See the "Environment" section of the \fBcurl\fR(1) man page for the list of accepted environment variable names. If you use an environment variable to set the proxy URI, you must also set the appropriate proxy property of the \fBsvc:/application/pkg/system-repository\fR SMF service to the same value. See "Specifying a Proxy" in \fIAdding and Updating Software in Oracle Solaris 11.2\fR.
 .RE
 
 .RE
@@ -2896,7 +2896,7 @@
 .ad
 .sp .6
 .RS 4n
-(string) Determine what checks will be performed on manifests when installing, updating, modifying, or verifying packages in the image. The final policy applied to a package depends on the combination of image policy and publisher policy. The combination will be at least as strict as the stricter of the two policies taken individually. By default, the package client does not check whether certificates have been revoked. To enable those checks, which might require the client to contact external web sites, set the \fBcheck-certificate-revocation\fR image property to \fBTrue\fR. The following values are allowed:
+(string) Determine what checks will be performed on manifests when installing, updating, modifying, or verifying packages in the image. The final policy applied to a package depends on the combination of image policy and publisher policy. The combination will be at least as strict as the stricter of the two policies taken individually. By default, the package client does not check whether certificates have been revoked. To enable those checks, which might require the client to contact external Internet sites, set the \fBcheck-certificate-revocation\fR image property to \fBTrue\fR. The following values are allowed:
 .sp
 .ne 2
 .mk
@@ -3585,7 +3585,7 @@
 .ad
 .sp .6
 .RS 4n
-Seconds below the \fBlowspeed\fR limit (1024 bytes/sec) during transport operations before the client aborts the operation. A value of 0 means do not abort the operation.
+Seconds below the \fBlowspeed\fR limit (1024 bytes/second) during transport operations before the client aborts the operation. A value of 0 means do not abort the operation.
 .sp
 Default value: 30
 .RE
@@ -3834,10 +3834,10 @@
 .SH SEE ALSO
 .sp
 .LP
-\fBpkgsend\fR(1), \fBpkg.depotd\fR(1M), \fBglob\fR(3C), \fBpkg\fR(5), \fBbeadm\fR(1M)
+\fBpkgsend\fR(1), \fBbeadm\fR(1M), \fBpkg.depotd\fR(1M), \fBpkg.sysrepo\fR(1M), \fBglob\fR(3C), \fBpkg\fR(5)
 .sp
 .LP
-\fIAdding and Updating Oracle Solaris 11.1 Software Packages\fR
+\fIAdding and Updating Software in Oracle Solaris 11.2\fR
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkg.5	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkg.5	Fri Oct 11 16:09:34 2013 +0100
@@ -1,6 +1,6 @@
 '\" te
 .\" Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
-.TH pkg 5 "1 Mar 2013" "SunOS 5.11" "Standards, Environments, and Macros"
+.TH pkg 5 "07 Aug 2013" "SunOS 5.11" "Standards, Environments, and Macros"
 .SH NAME
 pkg \- Image Packaging System
 .SH DESCRIPTION
@@ -90,7 +90,7 @@
 .ad
 .RS 9n
 .rt  
-The file system path where the file is installed. This is a \fBfile\fR action's key attribute. 
+The file system path where the file is installed. This is a \fBfile\fR action's key attribute.
 .RE
 
 .sp
@@ -101,7 +101,7 @@
 .ad
 .RS 9n
 .rt  
-The access permissions (in numeric form) of the file. These are simple permissions only, not ACLs. 
+The access permissions (in numeric form) of the file. These are simple permissions only, not ACLs.
 .RE
 
 .sp
@@ -112,7 +112,7 @@
 .ad
 .RS 9n
 .rt  
-The name of the user that owns the file. 
+The name of the user that owns the file.
 .RE
 
 .sp
@@ -123,7 +123,7 @@
 .ad
 .RS 9n
 .rt  
-The name of the group that owns the file. 
+The name of the group that owns the file.
 .RE
 
 .sp
@@ -131,7 +131,7 @@
 The payload is a positional attribute in that it is not named. It is the first word after the action name. In a published manifest, it is the \fBSHA-1\fR hash of the file contents. If present in a manifest that has yet to be published, it represents the path where the payload can be found. See \fBpkgsend\fR(1). The hash attribute can be used instead of the positional attribute, should the value include an equals sign. Both can be used in the same action. However, the hashes must be identical.
 .sp
 .LP
-Other attributes include:
+The \fBpreserve\fR and \fBoverlay\fR attributes affect whether and how a \fBfile\fR action is installed.
 .sp
 .ne 2
 .mk
@@ -140,15 +140,101 @@
 .ad
 .sp .6
 .RS 4n
-This specifies that the file's contents should not be overwritten on upgrade if the contents are determined to have changed since the file was installed or last upgraded. On initial installs, if an existing file is found, the file is salvaged (stored in \fB/var/pkg/lost+found\fR).
+Specifies when and how files are preserved during package operations.
+.sp
+When a package is initially installed, if a file delivered by the package has a \fBpreserve\fR attribute defined with any value and the file already exists in the image, the existing file is stored in \fB/var/pkg/lost+found\fR and the packaged file is installed.
 .sp
-If the value of \fBpreserve\fR is \fBrenameold\fR, then the existing file is renamed with the extension \fB\&.old\fR, and the new file is put in its place.
+When a package is initially installed, if a file delivered by the package has a \fBpreserve\fR attribute defined and the file does not already exist in the image, whether that file is installed depends on the value of the \fBpreserve\fR attribute:
+.RS +4
+.TP
+.ie t \(bu
+.el o
+If the value of \fBpreserve\fR is \fBlegacy\fR, the packaged file is not installed.
+.RE
+.RS +4
+.TP
+.ie t \(bu
+.el o
+If the value of \fBpreserve\fR is not \fBlegacy\fR, the packaged file is installed.
+.RE
+When a package is downgraded, if a file delivered by the downgraded version of the package has a \fBpreserve\fR attribute defined with any value and all of the following conditions are true, the file that currently exists in the image is renamed with the extension \fB\&.update\fR, and the file from the downgraded package is installed.
+.RS +4
+.TP
+.ie t \(bu
+.el o
+The file exists in the image.
+.RE
+.RS +4
+.TP
+.ie t \(bu
+.el o
+The content of the file delivered by the downgraded version of the package is different from the content of the file delivered by the currently installed version of the package.
+.RE
+.RS +4
+.TP
+.ie t \(bu
+.el o
+The content of the file delivered by the downgraded version of the package is different from the content of the file that exists in the image.
+.RE
+If any of the above conditions is not true, the file is treated the same as if the package is being upgraded, rather than downgraded.
+.sp
+When a package is upgraded, if a \fBfile\fR action delivered by the upgraded version of the package has a \fBpreserve\fR attribute defined with any value and the \fBfile\fR action is the same as the \fBfile\fR action delivered by the currently installed version of the package, the file is not installed, and the file that exists in the image is not modified. Any modifications made since the previous version was installed are preserved.
 .sp
-If the value of \fBpreserve\fR is \fBrenamenew\fR, then the existing file is left alone, and the new file is installed with the extension \fB\&.new\fR.
-.sp
-If the value of \fBpreserve\fR is \fBlegacy\fR, then this file is not installed for initial package installs. On upgrades, any existing file is renamed with the extension \fB\&.legacy\fR, and then the new file is put in its place.
-.sp
-If the value of \fBpreserve\fR is \fBtrue\fR (or a value not listed above, such as \fBstrawberry\fR), then the existing file is left alone, and the new file is not installed.
+When a package is upgraded, if a \fBfile\fR action delivered by the upgraded version of the package has a \fBpreserve\fR attribute defined and the \fBfile\fR action is new or is different from the \fBfile\fR action delivered by the currently installed version of the package, the upgrade is done in the following way:
+.RS +4
+.TP
+.ie t \(bu
+.el o
+If the file does not exist in the image, the new file is installed.
+.RE
+.RS +4
+.TP
+.ie t \(bu
+.el o
+If the file delivered by the upgraded version of the package exists in the image, did not exist in the currently installed version of the package, and was not renamed or moved by using the \fBoriginal_name\fR attribute (see below), then the existing file is stored in \fB/var/pkg/lost+found\fR and the file delivered by the upgraded version of the package is installed.
+.RE
+.RS +4
+.TP
+.ie t \(bu
+.el o
+If the file delivered by the upgraded version of the package exists in the image and has different content from the file delivered by the currently installed version of the package, the upgrade is done according to the value of the \fBpreserve\fR attribute:
+.RS +4
+.TP
+.ie t \(bu
+.el o
+If the file delivered by the upgraded version of the package has a \fBpreserve\fR value of \fBrenameold\fR, the existing file is renamed with the extension \fB\&.old\fR, and the new file is installed with updated permissions and timestamp (if present). See the \fBtimestamp\fR attribute description below.
+.RE
+.RS +4
+.TP
+.ie t \(bu
+.el o
+If the file delivered by the upgraded version of the package has a \fBpreserve\fR value of \fBrenamenew\fR, the new file is installed with the extension \fB\&.new\fR and the existing file is not modified.
+.RE
+.RS +4
+.TP
+.ie t \(bu
+.el o
+If the file delivered by the upgraded version of the package has a \fBpreserve\fR value of \fBtrue\fR, the new file is not installed, but the permissions and timestamp (if present) are reset on the existing file.
+.RE
+.RE
+.RS +4
+.TP
+.ie t \(bu
+.el o
+If the file delivered by the upgraded version of the package exists in the image, has the same content as the file delivered by the currently installed version of the package, and has a \fBpreserve\fR value of either \fBrenameold\fR or \fBrenamenew\fR, the existing file is replaced by the file delivered by the upgraded version of the package, including replacing permissions and timestamp (if present).
+.RE
+.RS +4
+.TP
+.ie t \(bu
+.el o
+If the file delivered by the upgraded version of the package exists in the image, has a \fBpreserve\fR value of \fBlegacy\fR in the upgraded package, and has a different \fBpreserve\fR value in the currently installed version of the package, the existing file is renamed with the extension \fB\&.legacy\fR, and the new file is installed with updated permissions and timestamp (if present).
+.RE
+.RS +4
+.TP
+.ie t \(bu
+.el o
+If the file delivered by the upgraded version of the package exists in the image and has a \fBpreserve\fR value of \fBlegacy\fR in both the upgraded package and the currently installed version of the package, the permissions and timestamp (if present) are reset on the existing file.
+.RE
 .RE
 
 .sp
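
The initial-install rules above reduce to a small decision procedure. A deliberately simplified sketch covering initial install only (hypothetical helper; the upgrade and downgrade branches described above are omitted):

import os
import shutil

LOST_FOUND = "/var/pkg/lost+found"

def install_preserved_file(payload, dest, preserve):
        """Initial install of a file action carrying a preserve value."""
        if os.path.exists(dest):
                # Any preserve value salvages a pre-existing file.
                salvage_dir = os.path.join(LOST_FOUND,
                    os.path.dirname(dest).lstrip("/"))
                if not os.path.isdir(salvage_dir):
                        os.makedirs(salvage_dir)
                shutil.move(dest, salvage_dir)
        elif preserve == "legacy":
                # legacy files are not delivered on initial install.
                return
        shutil.copy(payload, dest)
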
@@ -159,18 +245,39 @@
 .ad
 .sp .6
 .RS 4n
-This specifies whether the action allows other packages to deliver a file at the same location or whether it delivers a file intended to overlay another. This functionality is intended for use with configuration files that do not participate in any self-assembly (for example, \fB/etc/motd\fR) and that can be safely overwritten.
+Specifies whether the action allows other packages to deliver a file at the same location or whether it delivers a file intended to overlay another file. This functionality is intended for use with configuration files that do not participate in any self-assembly (for example, \fB/etc/motd\fR) and that can be safely overwritten.
 .sp
 If \fBoverlay\fR is not specified, multiple packages cannot deliver files to the same location.
 .sp
-If the value of \fBoverlay\fR is \fBallow\fR, one other package is allowed to deliver a file to the same location. This value has no effect unless the \fBpreserve\fR attribute is also set.
+The \fBoverlay\fR attribute can have one of the following values:
 .sp
-If the value of \fBoverlay\fR is \fBtrue\fR, the file delivered by the action overwrites any other action that has specified \fBallow\fR. Changes to the installed file are preserved based on the value of the \fBpreserve\fR attribute of the overlaying file. On removal, the contents of the file are preserved if the action being overlaid is still installed, regardless of whether the \fBpreserve\fR attribute was specified. Only one action can overlay another, and the \fBmode\fR, \fBowner\fR, and \fBgroup\fR attributes must match.
+.ne 2
+.mk
+.na
+\fB\fBallow\fR\fR
+.ad
+.RS 9n
+.rt  
+One other package is allowed to deliver a file to the same location. This value has no effect unless the \fBpreserve\fR attribute is also set.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
+\fB\fBtrue\fR\fR
+.ad
+.RS 9n
+.rt  
+The file delivered by the action overwrites any other action that has specified \fBallow\fR.
+.RE
+
+Changes to the installed file are preserved based on the value of the \fBpreserve\fR attribute of the overlaying file. On removal, the contents of the file are preserved if the action being overlaid is still installed, regardless of whether the \fBpreserve\fR attribute was specified. Only one action can overlay another, and the \fBmode\fR, \fBowner\fR, and \fBgroup\fR attributes must match.
 .RE
 
 .sp
 .LP
-Files can also be "tasted," and depending on the flavor, can have additional attributes. For ELF files, the following attributes are recognized:
+The following attributes are recognized for ELF files:
 .sp
 .ne 2
 .mk
@@ -205,6 +312,9 @@
 .RE
 
 .sp
+.LP
+The following additional attributes are recognized for \fBfile\fR actions:
+.sp
 .ne 2
 .mk
 .na
@@ -239,6 +349,28 @@
 The \fBrevert-tag\fR attribute can also be specified at the directory level. See "Directory Actions" below.
 .RE
 
+.sp
+.ne 2
+.mk
+.na
+\fB\fBtimestamp\fR\fR
+.ad
+.sp .6
+.RS 4n
+This attribute is used to set the access and modification time on the file. The \fBtimestamp\fR attribute value must be expressed in UTC in ISO-8601 format, omitting the colons and hyphens.
+.sp
+The \fBtimestamp\fR attribute is essential when packaging \fB\&.pyc\fR or \fB\&.pyo\fR files for Python. The related \fB\&.py\fR file for the \fB\&.pyc\fR or \fB\&.pyo\fR files must be marked with the timestamp embedded within those files, as shown in the following example:
+.sp
+.in +2
+.nf
+file path=usr/lib/python2.6/vendor-packages/pkg/__init__.pyc ...
+file path=usr/lib/python2.6/vendor-packages/pkg/__init__.py \e
+     timestamp=20130311T221521Z ...
+.fi
+.in -2
+
+.RE
+
 .SS "Directory Actions"
 .sp
 .LP
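
The timestamp value described above is ISO-8601 UTC with colons and hyphens omitted. A sketch of deriving such a value from a file's modification time, for example to mark a packaged .py so it matches the timestamp recorded in its compiled .pyc:

import datetime
import os

def pkg_timestamp(path):
        # UTC, ISO-8601, colons and hyphens omitted: 20130311T221521Z.
        mtime = os.stat(path).st_mtime
        return datetime.datetime.utcfromtimestamp(mtime).strftime(
            "%Y%m%dT%H%M%SZ")

print(pkg_timestamp(__file__))
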
@@ -255,6 +387,8 @@
 .sp .6
 .RS 4n
 This attribute is used to identify unpackaged files that should be removed as a set. See "File Actions" above for a description of how to specify this attribute for \fBfile\fR actions. For directories, the value of the \fBrevert-tag\fR attribute is \fItagname\fR\fB=\fR\fIpattern\fR. Multiple \fBrevert-tag\fR attributes can be specified for a single \fBdir\fR action. When \fBpkg revert\fR is invoked with a matching \fItagname\fR, any unpackaged files or directories under this \fBdir\fR directory that match \fIpattern\fR (using shell globbing characters) are removed. See the \fBpkg\fR(1) man page for information about the \fBpkg revert\fR command.
+.sp
+The \fBrevert-tag\fR attribute can also be specified at the directory level. See "Directory Actions" below.
 .RE
 
 .sp
@@ -547,7 +681,7 @@
 .ad
 .sp .6
 .RS 4n
-The dependency must, if present, be at the specified value or better on the image to be modified prior to installation. If the value of the \fBroot-image\fR attribute is \fBtrue\fR, the dependency must be present on the image rooted at / in order to install this package.  If the fmri also starts with pkg:/feature/firmware/, the remainder of the name is treated as a command in /usr/lib/fwenum that evaluates the firmware dependency. See the package developer's guide for details.
+Prior to installation of this package, the dependency target must, if present, be at the specified value or greater on the image to be modified. If the value of the \fBroot-image\fR attribute is \fBtrue\fR, the target must be present on the image rooted at / in order to install this package. If the value of the \fBroot-image\fR attribute is \fBtrue\fR and the value of the \fBfmri\fR attribute starts with \fBpkg:/feature/firmware/\fR, the remainder of the \fBfmri\fR value is treated as a command in \fB/usr/lib/fwenum\fR that evaluates the firmware dependency. See \fIPackaging and Delivering Software With the Image Packaging System in Oracle Solaris 11.2\fR for examples.
 .RE
 
 .sp
@@ -902,7 +1036,7 @@
 The \fBuser\fR action defines a UNIX user as defined in \fB/etc/passwd\fR, \fB/etc/shadow\fR, \fB/etc/group\fR, and \fB/etc/ftpd/ftpusers\fR files. Entries are added to the appropriate files for users defined with this \fBuser\fR action.
 .sp
 .LP
-The  \fBuser\fR action is intended to define a user for a daemon or other software to use. Do not use the \fBuser\fR action to define administrative or interactive accounts.
+The \fBuser\fR action is intended to define a user for a daemon or other software to use. Do not use the \fBuser\fR action to define administrative or interactive accounts.
 .sp
 .LP
 The following attributes are recognized:
@@ -1200,7 +1334,7 @@
 Facets are treated as boolean values by package clients: Facets can be set only to \fBtrue\fR (enabled) or \fBfalse\fR (disabled) in the image. By default, all facets are considered to be set to \fBtrue\fR in the image.
 .sp
 .LP
-Facets can either be set locally within an image or inherited from a parent image. Inherited facets are evaluated before, and hence take priority over, any local facets. If the same exact facet is both inherited and set locally, the inherited facet value masks the local value. Facet changes made via \fBpkg change-facet\fR will only affect local facets.
+Facets can be either set locally within an image or inherited from a parent image. For example, a non-global zone can inherit a facet from the global zone. Inherited facets are evaluated before, and take priority over, any locally set facets. If the same facet is both inherited and locally set, the inherited facet value masks the locally set value. Facet changes made by using the \fBpkg change-facet\fR command only affect locally set facets.
 .sp
 .LP
 The value of a facet tag on an action can be set to \fBall\fR or \fBtrue\fR to control how clients filter faceted actions. All values other than \fBall\fR or \fBtrue\fR have undefined behavior. See below for a description of the conditions that must exist in the image to install an action that has facet tags.
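
The masking rule in the rewritten paragraph above can be shown with plain dicts standing in for the inherited and locally set facet tables:

def effective_facets(inherited_facets, local_facets):
        facets = dict(local_facets)
        # Inherited facets are evaluated first, so they mask local values.
        facets.update(inherited_facets)
        return facets

print(effective_facets(
    inherited_facets={"facet.doc.man": False},
    local_facets={"facet.doc.man": True, "facet.devel": False}))
# {'facet.doc.man': False, 'facet.devel': False}
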
@@ -1241,7 +1375,7 @@
 
 file group=sys mode=0644 overlay=allow owner=root \e
   path=etc/motd pkg.csize=68 pkg.size=48 preserve=true \e
-  variant.debug.osnet=false 
+  variant.debug.osnet=false
 .fi
 .in -2
 
@@ -1403,13 +1537,13 @@
 \fBpkg\fR(1), \fBpkgsend\fR(1), \fBpkg.depotd\fR(1M), \fBpkg.sysrepo\fR(1M), \fBsvcs\fR(1), \fBsvcadm\fR(1M)
 .sp
 .LP
-\fIAdding and Updating Oracle Solaris 11.1 Software Packages\fR
+\fIAdding and Updating Software in Oracle Solaris 11.2\fR
 .sp
 .LP
-\fICopying and Creating Oracle Solaris 11.1 Package Repositories\fR
+\fICopying and Creating Package Repositories in Oracle Solaris 11.2\fR
 .sp
 .LP
-\fIPackaging and Delivering Software With the Oracle Solaris 11 Image Packaging System\fR
+\fIPackaging and Delivering Software With the Image Packaging System in Oracle Solaris 11.2\fR
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkg.depot-config.1m	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkg.depot-config.1m	Fri Oct 11 16:09:34 2013 +0100
@@ -1,8 +1,8 @@
 '\" te
 .\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
-.TH pkg.depotd-config 1M "14 Feb 2013" "SunOS 5.11" "System Administration Commands"
+.TH pkg.depot-config 1M "02 Oct 2013" "SunOS 5.11" "System Administration Commands"
 .SH NAME
-pkg.depotd-config \- Image Packaging System HTTP depot configuration generator
+pkg.depot-config \- Image Packaging System HTTP depot configuration generator
 .SH SYNOPSIS
 .LP
 .nf
@@ -15,7 +15,7 @@
 .SH DESCRIPTION
 .sp
 .LP
-\fBpkg.depotd-config\fR generates the configuration files for the Image Packaging System (IPS) depot. The IPS depot provides scalable read-only access to IPS package repositories over HTTP.
+\fBpkg.depot-config\fR generates the configuration files for the Image Packaging System (IPS) depot. The IPS depot provides scalable read-only access to IPS package repositories over HTTP.
 .sp
 .LP
 The IPS depot is configured using the \fBsvc:/application/pkg/depot\fR Service Management Facility (SMF) service in conjunction with one or more instances of the \fBsvc:/application/pkg/server\fR service.
@@ -27,7 +27,19 @@
 To change depot configuration, modify the properties of the \fBpkg/depot\fR service or the appropriate \fBpkg/server\fR service instance and refresh the instance. Modifying \fBpkg/server\fR service instance states can cause the \fBpkg/depot\fR service to be refreshed and the depot configuration files to be regenerated.
 .sp
 .LP
-To serve multiple repositories, you need a separate \fBpkg/server\fR service instance for each repository but only one \fBpkg/depot\fR service instance. Each instance of the \fBpkg/server\fR service provides a read/write depot server that supports HTTP and HTTPS for an individual repository. The \fBpkg/depot:default\fR service instance provides a scalable, read-only depot server that supports HTTP for multiple repositories.
+To serve multiple repositories, you need a separate \fBpkg/server\fR service instance for each repository but only one \fBpkg/depot\fR service instance. Each instance of the \fBpkg/server\fR service maps to an IPS repository specified by the \fBpkg/inst_root\fR service property. The \fBpkg/server\fR service does one of the following:
+.RS +4
+.TP
+.ie t \(bu
+.el o
+Runs an associated \fBpkg.depotd\fR process to serve the content of the repository.
+.RE
+.RS +4
+.TP
+.ie t \(bu
+.el o
+Runs no processes and instead helps to configure the \fBpkg.depot\fR service.
+.RE
 .sp
 .LP
 Each repository is supported by a \fBpkg/server\fR service instance. A repository might also be supported by the \fBpkg/depot:default\fR service. If the \fBpkg/standalone\fR property of a particular \fBpkg/server\fR instance is set to \fBtrue\fR, then the repository is served by the \fBpkg.depotd\fR process. If the \fBpkg/standalone\fR property of a particular \fBpkg/server\fR instance is set to \fBfalse\fR, then the repository is served by the \fBpkg/depot:default\fR service. Each \fBpkg/server\fR instance either runs \fBpkg.depotd\fR or contributes configuration information to \fBpkg/depot:default\fR.
@@ -327,7 +339,7 @@
 \fBsvcprop\fR(1), \fBsvcs\fR(1), \fBsvcadm\fR(1M), \fBsvccfg\fR(1M), \fBpkg.depotd\fR(1M), \fBpkg\fR(5)
 .sp
 .LP
-\fICopying and Creating Oracle Solaris 11.1 Package Repositories\fR
+\fICopying and Creating Package Repositories in Oracle Solaris 11.2\fR
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkg.depotd.1m	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkg.depotd.1m	Fri Oct 11 16:09:34 2013 +0100
@@ -1,6 +1,6 @@
 '\" te
-.\" Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
-.TH pkg.depotd 1M "5 Jun 2012" "SunOS 5.11" "System Administration Commands"
+.\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
+.TH pkg.depotd 1M "02 Oct 2013" "SunOS 5.11" "System Administration Commands"
 .SH NAME
 pkg.depotd \- Image Packaging System depot server
 .SH SYNOPSIS
@@ -33,7 +33,10 @@
 .SH SMF PROPERTIES
 .sp
 .LP
-The \fBpkg.depot\fR server is generally configured via the SMF properties associated with its service. See the \fBsmf\fR(5) man page for information about SMF properties. The following properties are recognized:
+The \fBpkg.depotd\fR server is generally configured via the SMF properties associated with its service. The \fBsvc:/application/pkg/server\fR service runs a \fBpkg.depotd\fR server process, or contributes configuration to the \fBsvc:/application/pkg/depot\fR service. See the \fBpkg.depot-config\fR(1M) man page and the \fBpkg/standalone\fR property below.
+.sp
+.LP
+See the \fBsmf\fR(5) man page for information about SMF properties. The following properties are recognized:
 .sp
 .ne 2
 .mk
@@ -108,7 +111,7 @@
 .ad
 .sp .6
 .RS 4n
-(\fBastring\fR) The file system path at which the instance should find its repository data. Required unless \fBfile_root\fR or \fBPKG_REPO\fR has been provided. The default value is \fB/var/pkgrepo\fR.
+(\fBastring\fR) The file system path at which the instance should find its repository data. Required unless \fBPKG_REPO\fR has been provided. The default value is \fB/var/pkgrepo\fR.
 .RE
 
 .sp
@@ -269,6 +272,17 @@
 .ne 2
 .mk
 .na
+\fB\fBpkg/standalone\fR\fR
+.ad
+.sp .6
+.RS 4n
+(\fBboolean\fR) To easily serve multiple repositories from a single Apache instance with minimal Apache configuration, set this property to \fBfalse\fR and set the \fBpkg/readonly\fR property of this \fBpkg/server\fR instance to \fBtrue\fR. The default value of \fBpkg/standalone\fR is \fBfalse\fR and the default value of \fBpkg/readonly\fR is \fBtrue\fR. See the \fBpkg.depot-config\fR(1M) man page for more information.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
 \fB\fBpkg/threads\fR\fR
 .ad
 .sp .6
@@ -787,10 +801,13 @@
 .SH SEE ALSO
 .sp
 .LP
-\fBdns-sd\fR(1M), \fBmdnsd\fR(1M), \fBpkg\fR(1), \fBpkgrepo\fR(1), \fBpkgsend\fR(1), \fBsyslogd\fR(1M), \fBsmf\fR(5)
+\fBpkg.depot-config\fR(1M), \fBdns-sd\fR(1M), \fBmdnsd\fR(1M), \fBpkg\fR(1), \fBpkgrepo\fR(1), \fBpkgsend\fR(1), \fBsyslogd\fR(1M), \fBsmf\fR(5)
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fICopying and Creating Package Repositories in Oracle Solaris 11.2\fR
+.sp
+.LP
+\fBhttps://java.net/projects/ips/pages/Home\fR
 .SH NOTES
 .sp
 .LP
@@ -803,6 +820,9 @@
 To control read access to the depot, you can use an HTTP reverse proxy in combination with authentication methods such as client based SSL certificate access, which \fBpkg\fR natively supports.
 .sp
 .LP
+To easily serve multiple repositories from a single Apache instance with minimal Apache configuration, set the \fBpkg/standalone\fR property of a particular \fBpkg/server\fR instance to \fBfalse\fR and set the \fBpkg/readonly\fR property of that instance to \fBtrue\fR. See the \fBpkg.depot-config\fR(1M) man page for more information.
+.sp
+.LP
 Changes to configuration, or changes to package data using file system based operations, require a restart of the depot server process so that the changes can be reflected in operations and output. Use one of the following methods to restart the depot server process:
 .RS +4
 .TP
--- a/src/man/pkg.sysrepo.1m	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkg.sysrepo.1m	Fri Oct 11 16:09:34 2013 +0100
@@ -1,6 +1,6 @@
 '\" te
-.\" Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
-.TH pkg.sysrepo 1M "27 May 2012" "SunOS 5.11" "System Administration Commands"
+.\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
+.TH pkg.sysrepo 1M "21 May 2013" "SunOS 5.11" "System Administration Commands"
 .SH NAME
 pkg.sysrepo \- Image Packaging System system repository configuration
 .SH SYNOPSIS
@@ -168,4 +168,4 @@
 \fBpkg\fR(1), \fBpkg.depotd\fR(1M), \fBpkg\fR(5)
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkgdepend.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkgdepend.1	Fri Oct 11 16:09:34 2013 +0100
@@ -1,6 +1,6 @@
 '\" te
 .\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
-.TH pkgdepend 1 "26 Feb 2013" "SunOS 5.11" "User Commands"
+.TH pkgdepend 1 "12 Jul 2013" "SunOS 5.11" "User Commands"
 .SH NAME
 pkgdepend \- Image Packaging System dependency analyzer
 .SH SYNOPSIS
@@ -115,10 +115,6 @@
 .na
 \fB\fB-R\fR \fIdir\fR\fR
 .ad
-.br
-.na
-\fB\fB--image-dir\fR \fIdir\fR\fR
-.ad
 .sp .6
 .RS 4n
 Operate on the image rooted at \fIdir\fR. If no directory was specified or determined based on environment, the default is /. See the "Environment Variables" section for more information.
@@ -420,12 +416,12 @@
 dir path=opt mode=0755 group=sys owner=root
 dir path=opt/python mode=0755 group=sys owner=root
 dir path=opt/python/foo mode=0755 group=sys owner=root
-file NOHASH path=opt/python/__init__.py mode=0644 group=sys owner=root
-file NOHASH path=opt/python/foo/__init__.py mode=0644 group=sys owner=root
+file path=opt/python/__init__.py mode=0644 group=sys owner=root
+file path=opt/python/foo/__init__.py mode=0644 group=sys owner=root
 #
 # Add runpath and bypass-generate attributes:
 #
-file NOHASH path=opt/python/foo/file.py mode=0644 group=sys owner=root \e
+file path=opt/python/foo/file.py mode=0644 group=sys owner=root \e
     pkg.depend.bypass-generate=.*/test.py.* \e
     pkg.depend.bypass-generate=.*/testmodule.so \e
     pkg.depend.bypass-generate=.*/test.so \e
@@ -522,4 +518,4 @@
 \fBpkg\fR(5)
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkgdiff.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkgdiff.1	Fri Oct 11 16:09:34 2013 +0100
@@ -8,7 +8,7 @@
 .nf
 /usr/bin/pkgdiff [-i \fIattribute\fR]... [-o \fIattribute\fR]
     [-t \fIaction_name\fR[,\fIaction_name\fR]...]...
-    [-v \fIname\fR=\fIvalue\fR]...  (\fIfile1\fR | -) (\fIfile2\fR | -)
+    [-v \fIname\fR=\fIvalue\fR]... (\fIfile1\fR | -) (\fIfile2\fR | -)
 .fi
 
 .SH DESCRIPTION
@@ -215,4 +215,4 @@
 \fBpkg\fR(5)
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkgfmt.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkgfmt.1	Fri Oct 11 16:09:34 2013 +0100
@@ -1,6 +1,6 @@
 '\" te
-.\" Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
-.TH pkgfmt 1 "27 May 2012" "SunOS 5.11" "User Commands"
+.\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
+.TH pkgfmt 1 "21 May 2013" "SunOS 5.11" "User Commands"
 .SH NAME
 pkgfmt \- format a package manifest
 .SH SYNOPSIS
@@ -144,4 +144,4 @@
 \fBpkg\fR(5)
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkglint.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkglint.1	Fri Oct 11 16:09:34 2013 +0100
@@ -1,12 +1,12 @@
 '\" te
-.\" Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
-.TH pkglint 1 "27 May 2012" "SunOS 5.11" "User Commands"
+.\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
+.TH pkglint 1 "02 Apr 2013" "SunOS 5.11" "User Commands"
 .SH NAME
 pkglint \- Image Packaging System package lint
 .SH SYNOPSIS
 .LP
 .nf
-/usr/bin/pkglint [-c \fIcache_dir\fR] [-r \fIrepo_uri\fR] [-p \fIregexp\fR]
+/usr/bin/pkglint [-c \fIcache_dir\fR] [-r \fIrepo_uri\fR]... [-p \fIregexp\fR]
     [-f \fIconfig_file\fR] [-b \fIbuild_no\fR] [-v]
     [-l \fIlint_uri\fR] | \fImanifest\fR ...
 .fi
@@ -140,7 +140,7 @@
 .ad
 .sp .6
 .RS 4n
-Specify a URI representing the location of the reference repository. If you specify \fB-r\fR, then you must also specify \fB-c\fR.
+Specify a URI representing the location of the reference repository. If you specify \fB-r\fR, then you must also specify \fB-c\fR. The \fB-r\fR option can be specified multiple times.
 .RE
 
 .sp
@@ -392,4 +392,4 @@
 \fBpkg\fR(1), \fBpkg.depotd\fR(1M), \fBpkgsend\fR(1), \fBpkg\fR(5)
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkgmerge.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkgmerge.1	Fri Oct 11 16:09:34 2013 +0100
@@ -1,6 +1,6 @@
 '\" te
 .\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
-.TH pkgmerge 1 "26 Feb 2013" "SunOS 5.11" "User Commands"
+.TH pkgmerge 1 "20 May 2013" "SunOS 5.11" "User Commands"
 .SH NAME
 pkgmerge \- Image Packaging System package merging utility
 .SH SYNOPSIS
@@ -22,6 +22,9 @@
 .sp
 .LP
 Non-identical actions that deliver to the same path in an input manifest result in \fBpkgmerge\fR exiting with an error.
+.sp
+.LP
+After a \fBpkgmerge\fR operation, if your destination repository must support \fBpkg search\fR operations, run \fBpkgrepo refresh\fR on the repository to update search indexes.
 .SH OPTIONS
 .sp
 .LP
@@ -364,9 +367,9 @@
 .sp
 .in +2
 .nf
-$ \fBpkgmerge -p dev -s arch=sparc,http://src1.example.com \e\fR
-\fB-s arch=i386,http://src2.example.com \e\fR
-\fB-d /\fIpath/to/target/repository\fR\fR
+$ \fBpkgmerge -p dev -s arch=sparc,http://src1.example.com \e
+-s arch=i386,http://src2.example.com \e
+-d /path/to/target/repository\fR
 .fi
 .in -2
 .sp
@@ -444,4 +447,7 @@
 \fBpkgrepo\fR(1), \fBpkg\fR(5)
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fIPackaging and Delivering Software With the Image Packaging System in Oracle Solaris 11.2\fR
+.sp
+.LP
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkgmogrify.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkgmogrify.1	Fri Oct 11 16:09:34 2013 +0100
@@ -1,6 +1,6 @@
 '\" te
 .\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
-.TH pkgmogrify 1 "26 Feb 2013" "SunOS 5.11" "User Commands"
+.TH pkgmogrify 1 "12 Jul 2013" "SunOS 5.11" "User Commands"
 .SH NAME
 pkgmogrify \- Image Packaging System manifest transmogrifier
 .SH SYNOPSIS
@@ -86,7 +86,7 @@
 .sp
 .in +2
 .nf
-file NOHASH path=usr/bin/$(ARCH64)/cputrack ...
+file path=usr/bin/$(ARCH64)/cputrack ...
 .fi
 .in -2
 
@@ -621,4 +621,4 @@
 \fBpkg\fR(1), \fBpkg\fR(5)
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkgrecv.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkgrecv.1	Fri Oct 11 16:09:34 2013 +0100
@@ -1,13 +1,14 @@
 '\" te
 .\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
-.TH pkgrecv 1 "4 Mar 2013" "SunOS 5.11" "User Commands"
+.TH pkgrecv 1 "30 Aug 2013" "SunOS 5.11" "User Commands"
 .SH NAME
 pkgrecv \- Image Packaging System content retrieval utility
 .SH SYNOPSIS
 .LP
 .nf
-/usr/bin/pkgrecv [-s \fIsrc_uri\fR] [-a] [-d (\fIpath\fR|\fIdest_uri\fR)]
-    [-c \fIcache_dir\fR] [-kr] [-m \fImatch\fR] [-n] [--raw]
+/usr/bin/pkgrecv [-aknrv] [-s (\fIsrc_path\fR|\fIsrc_uri\fR)]
+    [-d (\fIdest_path\fR|\fIdest_uri\fR)] [-c \fIcache_dir\fR]
+    [-m \fImatch\fR] [--raw]
     [--key \fIsrc_key\fR --cert \fIsrc_cert\fR]
     [--dkey \fIdest_key\fR --dcert \fIdest_cert\fR]
     (\fIfmri\fR|\fIpattern\fR) ...
@@ -15,19 +16,20 @@
 
 .LP
 .nf
-/usr/bin/pkgrecv [-s \fIsrc_uri\fR] --newest
+/usr/bin/pkgrecv [-s (\fIsrc_path\fR|\fIsrc_uri\fR)] --newest
 .fi
 
 .LP
 .nf
-/usr/bin/pkgrecv [-s \fIsrc_uri\fR] [-d \fIpath\fR] [-p \fIpublisher\fR ...] 
-    [--key \fIsrc_key\fR --cert \fIsrc_cert\fR] [-n] --clone
+/usr/bin/pkgrecv [-nv] [-s (\fIsrc_path\fR|\fIsrc_uri\fR)]
+    [-d (\fIdest_path\fR|\fIdest_uri\fR)] [-p \fIpublisher\fR]...
+    [--key \fIsrc_key\fR --cert \fIsrc_cert\fR] --clone
 .fi
 
 .SH DESCRIPTION
 .sp
 .LP
-\fBpkgrecv\fR retrieves all versions of each matching \fIfmri\fR or \fIpattern\fR from the \fIsrc_uri\fR \fBpkg\fR(5) repository or package archive and republishes the retrieved packages to the \fIdest_uri\fR package repository or package archive. By default, packages are retrieved in package repository format suitable for use with \fBpkg\fR, \fBpkg.depotd\fR, and package publication tools.
+\fBpkgrecv\fR retrieves all versions of each matching \fIfmri\fR or \fIpattern\fR from the \fBpkg\fR(5) repository or package archive specified by the \fB-s\fR option and optionally republishes the retrieved packages to the package repository or package archive specified by the \fB-d\fR option. By default, packages are retrieved in package repository format suitable for use with \fBpkg\fR, \fBpkg.depotd\fR, and package publication tools.
 .sp
 .LP
 Packages that have not changed are not republished. Therefore, the time to update an existing repository depends on the number of new and changed packages.
@@ -36,7 +38,7 @@
 Use the \fB-m\fR option to specify whether to retrieve all versions of each matching package, or only the newest version of each matching package.
 .sp
 .LP
-If you do not specify \fB\&'*'\fR to be retrieved, you might want to specify the \fB-r\fR option to be sure to retrieve all the dependencies of the specified packages.
+If you do not specify \fB'*'\fR to be retrieved, you might want to specify the \fB-r\fR option to be sure to retrieve all the dependencies of the specified packages.
 .LP
 Note - 
 .sp
@@ -69,7 +71,7 @@
 .ad
 .sp .6
 .RS 4n
-Store the retrieved package data in a \fBpkg\fR(5) archive at the location specified by \fB-d\fR. The file cannot already exist. This option can be used only with file system based destinations. Although not required, using a file extension of \fB\&.p5p\fR (for example, \fBarchive.p5p\fR) is strongly suggested. This option cannot be combined with \fB--raw\fR.
+Store the retrieved package data in a \fBpkg\fR(5) archive at the location specified by the \fB-d\fR option. The file specified by \fB-d\fR cannot already exist. The \fB-a\fR option can be used only with file system based destinations. Although not required, using a file extension of \fB\&.p5p\fR (for example, \fBarchive.p5p\fR) is strongly suggested. The \fB-a\fR option cannot be combined with the \fB--raw\fR option.
 .RE
 
 .sp
@@ -87,11 +89,11 @@
 .ne 2
 .mk
 .na
-\fB\fB-d\fR (\fIpath\fR|\fIdest_uri\fR)\fR
+\fB\fB-d\fR (\fIdest_path\fR|\fIdest_uri\fR)\fR
 .ad
 .sp .6
 .RS 4n
-Specify the file system path or URI where the retrieved packages should be republished. If \fB-a\fR  is specified, this destination is a new package archive that cannot already exist. If \fB-a\fR  is not specified, this destination must be a package repository that already exists. Use the \fBpkgrepo\fR command to create a new package repository. If \fB-d\fR is not specified, the value of \fBPKG_DEST\fR is used. See "Environment Variables" below.
+Specify the file system path or URI where the retrieved packages should be republished. If \fB-a\fR  is specified, this destination must be a package archive that does not already exist. If \fB-a\fR  is not specified, this destination must be a package repository that already exists. Use the \fBpkgrepo\fR command to create a new package repository. If \fB-d\fR is not specified, the value of \fBPKG_DEST\fR is used. See "Environment Variables" below.
 .RE
 
 .sp
@@ -193,7 +195,7 @@
 .ad
 .sp .6
 .RS 4n
-Only clone the given publisher. Can be specified multiple times. Only valid with --clone.
+Only clone the specified publisher. This option can be specified multiple times. The \fB-p\fR option is valid only with the \fB--clone\fR option.
 .RE
 
 .sp
@@ -211,11 +213,22 @@
 .ne 2
 .mk
 .na
-\fB\fB-s\fR \fIsrc_uri\fR\fR
+\fB\fB-s\fR (\fIsrc_path\fR|\fIsrc_uri\fR)\fR
 .ad
 .sp .6
 .RS 4n
-Specify a URI that represents the location of a \fBpkg\fR(5) repository or package archive from which to receive package data. If \fB-s\fR is not specified, the value of \fBPKG_SRC\fR is used. See "Environment Variables" below.
+Specify the file system path or URI of a \fBpkg\fR(5) repository or package archive from which to receive package data. If \fB-s\fR is not specified, the value of \fBPKG_SRC\fR is used. See "Environment Variables" below.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
+\fB\fB-v\fR\fR
+.ad
+.sp .6
+.RS 4n
+Display verbose output, including the number of packages retrieved and their full FMRIs, the number of files retrieved, and the estimated size of the transfer.
 .RE
 
 .sp
@@ -270,7 +283,7 @@
 .ad
 .sp .6
 .RS 4n
-Make an exact copy of the source repository. By default, the clone operation will only succeed if publishers in the source repository are also present in the destination. By using \fB-p\fR, the operation can be limited to specific publishers which will be added to the destination repository if not already present. Packages in the destination repository which are not in the source will be removed. Cloning will leave the destination repository altered in case of an error. It is therefore strongly advised to have the destination repository in its own zfs dataset and to take a snapshot before cloning.
+Make an exact copy of the source repository. By default, the clone operation succeeds only if publishers in the source repository are also present in the destination. To limit the clone operation to specified publishers, use the \fB-p\fR option. Publishers specified by using the \fB-p\fR option are added to the destination repository if they are not already present. Packages that are in the destination repository but not in the source repository are removed. The clone operation leaves the destination repository altered if an error occurs. Therefore, the destination repository should be in its own ZFS dataset, and a snapshot should be created prior to performing the clone operation.
 .RE
 
 .sp
@@ -358,12 +371,20 @@
 .sp
 .in +2
 .nf
-$ \fBpkgrecv -s http://test -d /local/repo -m latest \e\fR
+$ \fBpkgrecv -s http://test -d /local/repo -m latest -v \e\fR
 \fBeditor/vim\fR
 Processing packages for publisher solaris ...
-Retrieving and evaluating 2 package(s)...
-PROCESS       ITEMS     GET (MB)    SEND(MB)
-Completed       2/2    16.7/16.7    44.9/44.9
+Retrieving and evaluating 1 package(s)...
+
+Adding packages ...
+        Packages to add:       1
+      Files to retrieve:    1557
+Estimated transfer size: 9.21 MB
+PROCESS                                     ITEMS    GET (MB) SEND (MB)
+Completed                                     1/1     9.2/9.2 25.4/25.4
+
+FMRI
+pkg://solaris/editor/[email protected],5.11-0.175.1.0.0.24.0:20121023T171531Z
 .fi
 .in -2
 .sp
@@ -414,12 +435,12 @@
 
 .sp
 .LP
-Receive all packages that do not already exist and all changed content from the secure repository located at \fBhttp://pkg.oracle.com/solaris/support/\fR to the repository located at \fB/export/repoSolaris11\fR.
+Receive all packages that do not already exist and all changed content from the secure repository located at \fBhttps://pkg.oracle.com/solaris/support/\fR to the repository located at \fB/export/repoSolaris11\fR. 
 
 .sp
 .in +2
 .nf
-$ \fBpkgrecv -s http://pkg.oracle.com/solaris/support/ \e\fR
+$ \fBpkgrecv -s https://pkg.oracle.com/solaris/support/ \e\fR
 \fB-d /export/repoSolaris11 -m all-timestamps \e\fR
 \fB--key /var/pkg/ssl/Oracle_Solaris_11_Support.key.pem \e\fR
 \fB--cert /var/pkg/ssl/Oracle_Solaris_11_Support.certificate.pem '*'\fR
@@ -577,7 +598,7 @@
 \fBpkgrepo\fR(1), \fBpkgsend\fR(1), \fBpkg\fR(5)
 .sp
 .LP
-\fICopying and Creating Oracle Solaris 11.1 Package Repositories\fR
+\fICopying and Creating Package Repositories in Oracle Solaris 11.2\fR
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkgrepo.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkgrepo.1	Fri Oct 11 16:09:34 2013 +0100
@@ -1,18 +1,19 @@
 '\" te
 .\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
-.TH pkgrepo 1 "26 Feb 2013" "SunOS 5.11" "User Commands"
+.TH pkgrepo 1 "29 Aug 2013" "SunOS 5.11" "User Commands"
 .SH NAME
 pkgrepo \- Image Packaging System repository management utility
 .SH SYNOPSIS
 .LP
 .nf
-/usr/bin/pkgrepo create [--version \fIver\fR] \fIuri_or_path\fR
+/usr/bin/pkgrepo add-publisher -s \fIrepo_uri_or_path\fR
+    \fIpublisher\fR ...
 .fi
 
 .LP
 .nf
-/usr/bin/pkgrepo add-publisher -s \fIrepo_uri_or_path\fR
-    \fIpublisher\fR ...
+/usr/bin/pkgrepo remove-publisher [-n] [--synchronous]
+    -s \fIrepo_uri_or_path\fR \fIpublisher\fR ...
 .fi
 
 .LP
@@ -190,6 +191,56 @@
 .ne 2
 .mk
 .na
+\fB\fBpkgrepo remove-publisher\fR [\fB-n\fR] [\fB--synchronous\fR] \fB-s\fR \fIrepo_uri_or_path\fR \fIpublisher\fR ...\fR
+.ad
+.sp .6
+.RS 4n
+Remove the specified publishers from the repository. Remove all packages and all other data for the specified publishers.
+.sp
+If the default publisher is removed, and only one publisher remains in the repository, that remaining publisher becomes the default. The \fBpublisher/prefix\fR property of the repository is set to that remaining publisher.
+.sp
+If the default publisher is removed, and multiple publishers remain in the repository, or if the last publisher is removed from the repository, the \fBpublisher/prefix\fR property is unset.
+.sp
+This subcommand can be used only with version 4 file system based repositories.
+.sp
+.ne 2
+.mk
+.na
+\fB\fB-n\fR\fR
+.ad
+.sp .6
+.RS 4n
+Perform a trial run of the operation with no publisher changes made. The number of packages to be removed for each publisher is displayed before exiting.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
+\fB\fB-s\fR \fIrepo_uri_or_path\fR\fR
+.ad
+.sp .6
+.RS 4n
+Operate on the repository located at the given URI or file system path.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
+\fB\fB--synchronous\fR\fR
+.ad
+.sp .6
+.RS 4n
+Wait for the operation to complete before returning. If this option is not specified, the command returns immediately while the publisher is removed asynchronously in the background.
+.RE
+
+.RE
+
+.sp
+.ne 2
+.mk
+.na
 \fB\fBpkgrepo get\fR [\fB-F\fR \fIformat\fR] [\fB-H\fR] [\fB-p\fR \fIpublisher\fR]... \fB-s\fR \fIrepo_uri_or_path\fR [\fB--key\fR \fIssl_key\fR \fB--cert\fR \fIssl_cert\fR]... [\fIsection/property\fR ...]\fR
 .ad
 .sp .6
@@ -1084,4 +1135,7 @@
 \fBpkg\fR(1), \fBpkgrecv\fR(1), \fBpkgsend\fR(1), \fBpkg.depotd\fR(1M), \fBpkg\fR(5)
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fICopying and Creating Package Repositories in Oracle Solaris 11.2\fR
+.sp
+.LP
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkgsend.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkgsend.1	Fri Oct 11 16:09:34 2013 +0100
@@ -1,6 +1,6 @@
 '\" te
 .\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
-.TH pkgsend 1 "26 Feb 2013" "SunOS 5.11" "User Commands"
+.TH pkgsend 1 "30 Jul 2013" "SunOS 5.11" "User Commands"
 .SH NAME
 pkgsend \- Image Packaging System publication client
 .SH SYNOPSIS
@@ -154,6 +154,8 @@
 .sp
 If not specified, \fBpkgsend publish\fR adds the build version to the package FMRI. The \fBpublish\fR tool also adds the timestamp (the current time in UTC) to the package FMRI. See the \fBpkg\fR(5) man page for information about the version string of a package FMRI.
 .sp
+If multiple \fBpkgsend publish\fR processes might be publishing to the same \fB-s\fR repository simultaneously, specifying the \fB--no-catalog\fR option is recommended. See the description of the \fB--no-catalog\fR option below.
+.sp
 .ne 2
 .mk
 .na
@@ -205,10 +207,10 @@
 .ad
 .sp .6
 .RS 4n
-Do not add the package to the publisher's catalog. This option is recommended whenever multiple packages are being published at one time as updates to publisher catalogs must be performed serially. Once publication is complete, the \fBrefresh\fR subcommand of \fBpkgrepo\fR can be used to add the new packages to the respective publisher catalogs.
+Do not add the package to the publisher's catalog. This option is recommended whenever multiple packages are being published at one time because updates to publisher catalogs must be performed serially. Publication performance might be significantly reduced if this option is not used when multiple processes are simultaneously publishing packages. After publication is complete, the new packages can be added to the respective publisher catalogs by using the \fBpkgrepo refresh\fR command.
 .RE
 
-For a descriptions of the \fB-T\fR option, see the \fBgenerate\fR subcommand above.
+For a description of the \fB-T\fR option, see the \fBgenerate\fR subcommand above.
 .RE
 
 .SH ENVIRONMENT VARIABLES
@@ -386,7 +388,10 @@
 \fBpkgdepend\fR(1), \fBpkgrepo\fR(1), \fBpkg.depotd\fR(1M), \fBpkg\fR(5)
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fIPackaging and Delivering Software With the Image Packaging System in Oracle Solaris 11.2\fR
+.sp
+.LP
+\fBhttps://java.net/projects/ips/pages/Home\fR
 .SH NOTES
 .sp
 .LP
--- a/src/man/pkgsign.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkgsign.1	Fri Oct 11 16:09:34 2013 +0100
@@ -1,6 +1,6 @@
 '\" te
-.\" Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
-.TH pkgsign 1 "27 May 2012" "SunOS 5.11" "User Commands"
+.\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
+.TH pkgsign 1 "21 May 2013" "SunOS 5.11" "User Commands"
 .SH NAME
 pkgsign \- Image Packaging System signing utility
 .SH SYNOPSIS
@@ -237,4 +237,4 @@
 \fBpkg\fR(1), \fBpkgrecv\fR(1), \fBpkgsend\fR(1), \fBpkgrepo\fR(1), \fBpkg\fR(5)
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pkgsurf.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pkgsurf.1	Fri Oct 11 16:09:34 2013 +0100
@@ -2,7 +2,7 @@
 .\" Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
 .TH pkgsurf 1 "09 Aug 2013" "SunOS 5.11" "User Commands"
 .SH NAME
-pkgsurf \- Image Packaging System repository re-surfacing utility
+pkgsurf \- Image Packaging System repository resurfacing utility
 .SH SYNOPSIS
 .LP
 .nf
@@ -16,16 +16,19 @@
 \fBpkgsurf\fR is a package publication tool for replacing packages in a target repository that have not changed since the latest published version in the reference repository. The result is a new version surface of all packages in the target repository.
 .sp
 .LP
-\fBpkgsurf\fR operations are irreversible; the target repository should be stored in its own ZFS dataset and a snapshot of the dataset should be taken before running \fBpkgsurf\fR in case the operation must be reverted.
+\fBpkgsurf\fR operations are not reversible. In case the \fBpkgsurf\fR operation must be reverted, you should store the target repository in its own ZFS dataset and take a snapshot of the dataset before you run \fBpkgsurf\fR.
+.sp
+.LP
+Packages in the target repository are compared to packages in the reference repository and analyzed for content changes. For each package, if no content change is found, the package manifest is removed from the target repository and replaced with the manifest of the same package from the reference repository.
 .sp
 .LP
-Packages in the target repository are compared to a given reference repository and analyzed for content changes. If no content change can be determined, the package manifest will be removed from the target repository and replaced with that of the reference repository. Afterwards, the dependencies of all packages in the repository are adjusted to reflect the version changes and keep the integrity of the repository intact.
+The target repository must be a file system based repository. For optimal performance, the reference repository should also be a file system based repository.
 .sp
 .LP
-The target repository must be filesystem-based and should only contain one version of each package. If the target repository contains a package which is newer than the latest version in the reference repository and older than the latest version in the target repository, no package version replacement will occur for that package.  For optimal performance, the reference repository should also be filesystem-based.
+The target repository should contain only one version of each package. If the target repository contains a package that is both newer than the latest version in the reference repository and older than the latest version in the target repository, no package manifest replacement is done for that package.
 .sp
 .LP
-The reference repository may contain one or more versions of each package, however, only the latest version will be used for comparison.
+The reference repository can contain one or more versions of each package. However, only the latest version will be used for comparison.
 .SH OPTIONS
 .sp
 .LP
@@ -34,11 +37,37 @@
 .ne 2
 .mk
 .na
+\fB\fB-?\fR\fR
+.ad
+.br
+.na
+\fB\fB--help\fR\fR
+.ad
+.sp .6
+.RS 4n
+Display a usage message.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
 \fB\fB-c\fR \fIpattern\fR\fR
 .ad
 .sp .6
 .RS 4n
-Treat every package whose FMRI matches 'pattern' as changed and do not reversion it, even if there is no content change. Can be specified multiple times.
+Treat every package whose FMRI matches \fIpattern\fR as changed and do not reversion it, even if there is no content change. This option can be specified multiple times.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
+\fB\fB-i\fR \fIname\fR\fR
+.ad
+.sp .6
+.RS 4n
+Ignore \fBset\fR actions with the \fBname\fR field set to \fIname\fR for determination of content change. The package will be reversioned even if this action differs between target and reference versions. This option can be specified multiple times.
 .RE
 
 .sp
@@ -49,18 +78,7 @@
 .ad
 .sp .6
 .RS 4n
-Perform a trial run with no changes made to the target repository.
-.RE
-
-.sp
-.ne 2
-.mk
-.na
-\fB\fB-i\fR \fIname\fR\fR
-.ad
-.sp .6
-.RS 4n
-Ignore set actions with the name field set to \fIname\fR for determination of content change. Package will be reversioned even if this action differs between target and reference version. Can be specified multiple times.
+Perform a trial run of the operation with no changes made to the target repository.
 .RE
 
 .sp
@@ -71,9 +89,9 @@
 .ad
 .sp .6
 .RS 4n
-Specify the name of the publisher to be re-surfaced. This option can be specified multiple times.
+Specify the name of the publisher to be resurfaced. This option can be specified multiple times.
 .sp
-By default, packages from all publishers found in target and reference repositories are re-surfaced.
+By default, packages from all publishers found in target and reference repositories are resurfaced.
 .RE
 
 .sp
@@ -95,22 +113,7 @@
 .ad
 .sp .6
 .RS 4n
-Path to target repository. Packages in this repository get reversioned to the versions present in the reference repository. Repository should only contain one version of each package. Must be a filesystem-based repository.
-.RE
-
-.sp
-.ne 2
-.mk
-.na
-\fB\fB-?\fR\fR
-.ad
-.br
-.na
-\fB\fB--help\fR\fR
-.ad
-.sp .6
-.RS 4n
-Display a usage message.
+Specify the path to the target repository. Packages in this repository are reversioned to the versions present in the reference repository according to the comparison results. The target repository should contain only one version of each package. The target repository must be a file system based repository.
 .RE
 
 .SH ENVIRONMENT VARIABLES
@@ -123,30 +126,29 @@
 .na
 \fB\fBPKG_REPO\fR\fR
 .ad
-.RS 10n
+.RS 12n
 .rt  
 The absolute path of the target repository.
 .RE
 
 .SH EXAMPLES
 .LP
-\fBExample 1 \fRRe-surface repository
+\fBExample 1 \fRResurface a Repository
 .sp
 .LP
-Reversion each package in the target repository which did not have any content change from the same package in the reference repository.
+Reversion each package in the target repository that did not have any content change from the same package in the reference repository.
 
 .sp
 .in +2
 .nf
-$ \fBpkgsurf -s /path/to/target \e\fR
-\fB-r http://reference.example.com\fR
+$ \fBpkgsurf -s /path/to/target -r http://reference.example.com\fR
 .fi
 .in -2
 .sp
 
 .sp
 .LP
-Sample package in target:
+Sample package in the target repository:
 
 .sp
 .in +2
@@ -158,7 +160,7 @@
 
 .sp
 .LP
-Sample package in reference:
+Sample package in the reference repository:
 
 .sp
 .in +2
@@ -170,7 +172,7 @@
 
 .sp
 .LP
-Sample package in target after operation:
+Sample package in the target repository after the \fBpkgsurf\fR operation:
 
 .sp
 .in +2
@@ -180,7 +182,6 @@
 .fi
 .in -2
 
-
 .SH EXIT STATUS
 .sp
 .LP
@@ -254,4 +255,7 @@
 \fBpkgrepo\fR(1), \fBpkg\fR(5)
 .sp
 .LP
-\fBhttps://java.net/projects/ips\fR
+\fICopying and Creating Package Repositories in Oracle Solaris 11.2\fR
+.sp
+.LP
+\fBhttps://java.net/projects/ips/pages/Home\fR
--- a/src/man/pm-updatemanager.1	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/man/pm-updatemanager.1	Fri Oct 11 16:09:34 2013 +0100
@@ -1,6 +1,6 @@
 '\" te
-.\" Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
-.TH pm-updatemanager 1 "27 May 2012" "SunOS 5.11" "User Commands"
+.\" Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
+.TH pm-updatemanager 1 "21 May 2013" "SunOS 5.11" "User Commands"
 .SH NAME
 pm-updatemanager \- application to update packages
 .SH SYNOPSIS
@@ -169,7 +169,7 @@
 \fBpackagemanager\fR(1), \fBpkg\fR(1), \fBpkg\fR(5)
 .sp
 .LP
-\fBhttp://hub.opensolaris.org/bin/view/Project+pkg/\fR
+\fBhttps://java.net/projects/ips/pages/Home\fR
 .SH NOTES
 .sp
 .LP
--- a/src/modules/actions/file.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/actions/file.py	Fri Oct 11 16:09:34 2013 +0100
@@ -40,6 +40,7 @@
 import _common
 import pkg.actions
 import pkg.client.api_errors as api_errors
+import pkg.digest as digest
 import pkg.misc as misc
 import pkg.portable as portable
 
@@ -190,7 +191,11 @@
                         stream = self.data()
                         tfile = os.fdopen(tfilefd, "wb")
                         try:
-                                shasum = misc.gunzip_from_stream(stream, tfile)
+                                # Always verify using the most preferred hash
+                                hash_attr, hash_val, hash_func = \
+                                    digest.get_preferred_hash(self)
+                                shasum = misc.gunzip_from_stream(stream, tfile,
+                                    hash_func)
                         except zlib.error, e:
                                 raise ActionExecutionError(self,
                                     details=_("Error decompressing payload: %s")
@@ -200,15 +205,16 @@
                                 tfile.close()
                                 stream.close()
 
-                        if shasum != self.hash:
+                        if shasum != hash_val:
                                 raise ActionExecutionError(self,
                                     details=_("Action data hash verification "
                                     "failure: expected: %(expected)s computed: "
                                     "%(actual)s action: %(action)s") % {
-                                        "expected": self.hash,
+                                        "expected": hash_val,
                                         "actual": shasum,
                                         "action": self
                                     })
+
                 else:
                         temp = final_path
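Editor's note: the install() hunk above gunzips the payload while hashing the uncompressed bytes with whatever algorithm digest.get_preferred_hash() selects, then compares the digest against the action's hash attribute. A minimal self-contained sketch of that pattern, using hashlib and zlib directly rather than the real pkg.digest and pkg.misc modules; the ranked attribute names below are assumptions for illustration, not the actual pkg.digest ranking:

    import hashlib
    import zlib

    # Most-preferred first; these attribute names are assumed.
    RANKED_HASH_ATTRS = (("pkg.hash.sha256", hashlib.sha256),
        ("hash", hashlib.sha1))

    def get_preferred_hash(attrs):
            """Return (attr, expected_value, hash_func) for the most-preferred
            hash attribute present, mirroring the triple used in the diff."""
            for attr, func in RANKED_HASH_ATTRS:
                    if attr in attrs:
                            return attr, attrs[attr], func
            return None, None, None

    def gunzip_and_verify(instream, outstream, attrs):
            """Decompress a gzip payload into outstream while hashing the
            uncompressed bytes; True if the digest matches the action."""
            attr, expected, hash_func = get_preferred_hash(attrs)
            assert hash_func is not None  # action must carry a hash
            d = zlib.decompressobj(16 + zlib.MAX_WBITS)  # gzip container
            h = hash_func()
            buf = instream.read(64 * 1024)
            while buf:
                    chunk = d.decompress(buf)
                    h.update(chunk)
                    outstream.write(chunk)
                    buf = instream.read(64 * 1024)
            tail = d.flush()
            h.update(tail)
            outstream.write(tail)
            return h.hexdigest() == expected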
 
@@ -294,11 +300,12 @@
                             "found": misc.time_to_timestamp(lstat.st_mtime),
                             "expected": self.attrs["timestamp"] })
 
-                # avoid checking pkg.size if elfhash present;
-                # different size files may have the same elfhash
+                # avoid checking pkg.size if we have any content-hashes present;
+                # different size files may have the same content-hash
                 if "preserve" not in self.attrs and \
                     "pkg.size" in self.attrs and    \
-                    "elfhash" not in self.attrs and \
+                    not set(digest.RANKED_CONTENT_HASH_ATTRS).intersection(
+                    set(self.attrs.keys())) and \
                     lstat.st_size != int(self.attrs["pkg.size"]):
                         errors.append(_("Size: %(found)d bytes should be "
                             "%(expected)d") % { "found": lstat.st_size,
@@ -312,7 +319,9 @@
                         return errors, warnings, info
 
                 #
-                # Check file contents
+                # Check file contents. At the moment, the only content-hash
+                # supported in pkg(5) is for ELF files, so this will need work
+                # when additional content-hashes are added.
                 #
                 try:
                         # This is a generic mechanism, but only used for libc on
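Editor's note: the size-check guard two hunks up amounts to a small predicate: skip the pkg.size comparison whenever any ranked content-hash attribute is present, because a content hash (for instance, one computed over ELF sections only) does not pin down the file size. A sketch under assumed attribute names; only pkg.content-hash.sha256 and elfhash are named in this changeset:

    # Assumed stand-in for digest.RANKED_CONTENT_HASH_ATTRS.
    RANKED_CONTENT_HASH_ATTRS = ("pkg.content-hash.sha256", "elfhash")

    def should_check_size(attrs):
            """True only when pkg.size is present and no content-hash
            attribute would make the size comparison misleading."""
            return ("pkg.size" in attrs and "preserve" not in attrs and
                not set(RANKED_CONTENT_HASH_ATTRS).intersection(attrs))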
@@ -322,7 +331,10 @@
                         is_mtpt = self.attrs.get("mountpoint", "").lower() == "true"
                         elfhash = None
                         elferror = None
-                        if "elfhash" in self.attrs and haveelf and not is_mtpt:
+                        ehash_attr, elfhash_val, hash_func = \
+                            digest.get_preferred_hash(self,
+                                hash_type=pkg.digest.CONTENT_HASH)
+                        if ehash_attr and haveelf and not is_mtpt:
                                 #
                                 # It's possible for the elf module to
                                 # throw while computing the hash,
@@ -330,16 +342,28 @@
                                 # corrupted or truncated.
                                 #
                                 try:
-                                        elfhash = elf.get_dynamic(path)["hash"]
+                                        # Annoying that we have to hardcode this
+                                        if ehash_attr == \
+                                            "pkg.content-hash.sha256":
+                                                get_sha256 = True
+                                                get_sha1 = False
+                                        else:
+                                                get_sha256 = False
+                                                get_sha1 = True
+                                        elfhash = elf.get_dynamic(path,
+                                            sha1=get_sha1,
+                                            sha256=get_sha256)[ehash_attr]
                                 except RuntimeError, e:
-                                        errors.append("Elfhash: %s" % e)
+                                        errors.append("ELF content hash: %s" %
+                                            e)
 
                                 if elfhash is not None and \
-                                    elfhash != self.attrs["elfhash"]:
-                                        elferror = _("Elfhash: %(found)s "
+                                    elfhash != elfhash_val:
+                                        elferror = _("ELF content hash: "
+                                            "%(found)s "
                                             "should be %(expected)s") % {
                                             "found": elfhash,
-                                            "expected": self.attrs["elfhash"] }
+                                            "expected": elfhash_val }
 
                         # If we failed to compute the content hash, or the
                         # content hash failed to verify, try the file hash.
@@ -348,21 +372,24 @@
                         # changed, since obviously the file hash is a superset
                         # of the content hash.
                         if (elfhash is None or elferror) and not is_mtpt:
-                                hashvalue, data = misc.get_data_digest(path)
-                                if hashvalue != self.hash:
+                                hash_attr, hash_val, hash_func = \
+                                    digest.get_preferred_hash(self)
+                                sha_hash, data = misc.get_data_digest(path,
+                                    hash_func=hash_func)
+                                if sha_hash != hash_val:
                                         # Prefer the content hash error message.
                                         if "preserve" in self.attrs:
                                                 info.append(_(
-                                                    "editable file has" 
-                                                    " been changed"))
+                                                    "editable file has "
+                                                    "been changed"))
                                         elif elferror:
                                                 errors.append(elferror)
                                         else:
                                                 errors.append(_("Hash: "
                                                     "%(found)s should be "
                                                     "%(expected)s") % {
-                                                    "found": hashvalue,
-                                                    "expected": self.hash })
+                                                    "found": sha_hash,
+                                                    "expected": hash_val })
                                         self.replace_required = True
                 except EnvironmentError, e:
                         if e.errno == errno.EACCES:
@@ -414,30 +441,60 @@
                 # a downgrade since that isn't allowed across rename or obsolete
                 # boundaries.
                 is_file = os.path.isfile(final_path)
-                if orig and pkgplan.destination_fmri and \
-                    self.hash != orig.hash and \
-                    pkgplan.origin_fmri and \
-                    pkgplan.destination_fmri.version < pkgplan.origin_fmri.version:
-                        # Installed, preserved file is for a package newer than
-                        # what will be installed.  So check if the version on
-                        # disk is different than what was originally delivered,
-                        # and if so, preserve it.
-                        if is_file:
-                                ihash, cdata = misc.get_data_digest(final_path)
-                                if ihash != orig.hash:
-                                        # .old is intentionally avoided here to
-                                        # prevent accidental collisions with the
-                                        # normal install process.
-                                        return "renameold.update"
-                        return False
+
+                if orig:
+                        # We must use the same hash algorithm when comparing old
+                        # and new actions. Look for the most-preferred common
+                        # hash between old and new. Since the two actions may
+                        # not share a common hash (in which case, we get a tuple
+                        # of 'None' objects) we also need to know the preferred
+                        # hash to use when examining the old action on its own.
+                        common_hash_attr, common_hash_val, \
+                            common_orig_hash_val, common_hash_func = \
+                            digest.get_common_preferred_hash(self, orig)
+
+                        hattr, orig_hash_val, orig_hash_func = \
+                            digest.get_preferred_hash(orig)
+
+                        if common_orig_hash_val and common_hash_val:
+                                changed_hash = common_hash_val != common_orig_hash_val
+                        else:
+                                # we don't have a common hash, so we must treat
+                                # this as a changed action
+                                changed_hash = True
+
+                        if pkgplan.destination_fmri and \
+                            changed_hash and \
+                            pkgplan.origin_fmri and \
+                            pkgplan.destination_fmri.version < pkgplan.origin_fmri.version:
+                                # Installed, preserved file is for a package
+                                # newer than what will be installed. So check if
+                                # the version on disk is different than what
+                                # was originally delivered, and if so, preserve
+                                # it.
+                                if is_file:
+                                        ihash, cdata = misc.get_data_digest(
+                                            final_path,
+                                            hash_func=orig_hash_func)
+                                        if ihash != orig_hash_val:
+                                                # .old is intentionally avoided
+                                                # here to prevent accidental
+                                                # collisions with the normal
+                                                # install process.
+                                                return "renameold.update"
+                                return False
 
                 # If the action has been marked with a preserve attribute, and
                 # the file exists and has a content hash different from what the
                 # system expected it to be, then we preserve the original file
                 # in some way, depending on the value of preserve.
                 if is_file:
-                        chash, cdata = misc.get_data_digest(final_path)
-                        if not orig or chash != orig.hash:
+                        # if we had an action installed, then we know what hash
+                        # function was used to compute its hash attribute.
+                        if orig:
+                                chash, cdata = misc.get_data_digest(final_path,
+                                    hash_func=orig_hash_func)
+                        if not orig or chash != orig_hash_val:
                                 if pres_type in ("renameold", "renamenew"):
                                         return pres_type
                                 return True
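Editor's note: the comment block in the hunk above is the crux of the save_file() change: two actions can only be compared under a hash algorithm they both carry. A self-contained sketch of the four-tuple that digest.get_common_preferred_hash() is shown returning in this diff (the ranking below is an assumption):

    import hashlib

    # Most-preferred first; attribute names assumed for illustration.
    RANKED_HASH_ATTRS = (("pkg.hash.sha256", hashlib.sha256),
        ("hash", hashlib.sha1))

    def get_common_preferred_hash(new_attrs, orig_attrs):
            """Return (attr, new_val, orig_val, hash_func) for the
            most-preferred hash attribute present on both actions, or a
            tuple of Nones when they share no algorithm at all."""
            for attr, func in RANKED_HASH_ATTRS:
                    if attr in new_attrs and attr in orig_attrs:
                            return (attr, new_attrs[attr],
                                orig_attrs[attr], func)
            return None, None, None, None

When the tuple is all Nones, the caller must assume the content changed, which is exactly what the changed_hash fallback in the hunk above does.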
@@ -446,15 +503,40 @@
 
         # If we're not upgrading, or the file contents have changed,
         # retrieve the file and write it to a temporary location.
-        # For ELF files, only write the new file if the elfhash changed.
+        # For files with content-hash attributes, only write the new file if the
+        # content-hash changed.
         def needsdata(self, orig, pkgplan):
                 if self.replace_required:
                         return True
+                # check for the presence of a simple elfhash attribute,
+                # and if that's present, look for the common preferred elfhash.
+                # For now, this is sufficient, but when additional content
+                # types are supported (and we stop publishing SHA-1 hashes) more
+                # work will be needed to compute 'bothelf'.
                 bothelf = orig and "elfhash" in orig.attrs and \
                     "elfhash" in self.attrs
-                if not orig or \
-                    (orig.hash != self.hash and (not bothelf or
-                        orig.attrs["elfhash"] != self.attrs["elfhash"])):
+                if bothelf:
+                        common_elf_attr, common_elfhash, common_orig_elfhash, \
+                            common_elf_func = \
+                            digest.get_common_preferred_hash(self, orig,
+                            hash_type=digest.CONTENT_HASH)
+
+                common_hash_attr, common_hash_val, \
+                    common_orig_hash_val, common_hash_func = \
+                    digest.get_common_preferred_hash(self, orig)
+
+                if not orig:
+                        changed_hash = True
+                elif orig and (common_orig_hash_val is None or
+                    common_hash_val is None):
+                        # we have no common hash, so we have to treat this as a
+                        # changed action
+                        changed_hash = True
+                else:
+                        changed_hash = common_hash_val != common_orig_hash_val
+
+                if (changed_hash and (not bothelf or
+                    common_orig_elfhash != common_elfhash)):
                         return True
                 elif orig:
                         # It's possible that the file content hasn't changed
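Editor's note: needsdata() thus reduces to one question: under the best common algorithm, did the payload hash change, with "no common algorithm" conservatively counted as changed? A compact sketch of that decision (attribute names assumed):

    def payload_changed(new_attrs, orig_attrs,
        ranked=("pkg.hash.sha256", "hash")):
            """True if the payload must be re-fetched: the actions share no
            hash attribute, or the shared most-preferred values differ."""
            for attr in ranked:
                    if attr in new_attrs and attr in orig_attrs:
                            return new_attrs[attr] != orig_attrs[attr]
            return True

For example, payload_changed({"hash": "a"}, {"hash": "a"}) is False, while payload_changed({"pkg.hash.sha256": "x"}, {"hash": "y"}) is True, because no algorithm is common to both actions.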
@@ -507,8 +589,11 @@
                         # modified since they were installed and this is
                         # not an upgrade.
                         try:
-                                ihash, cdata = misc.get_data_digest(path)
-                                if ihash != self.hash:
+                                hash_attr, hash_val, hash_func = \
+                                    digest.get_preferred_hash(self)
+                                ihash, cdata = misc.get_data_digest(path,
+                                    hash_func=hash_func)
+                                if ihash != hash_val:
                                         pkgplan.salvage(path)
                                         # Nothing more to do.
                                         return
@@ -524,7 +609,8 @@
         def different(self, other, cmp_hash=True):
                 # Override the generic different() method to ignore the file
                 # hash for ELF files and compare the ELF hash instead.
-                # XXX This should be modularized and controlled by policy.
+                # XXX This should be modularized and controlled by policy and
+                # needs work once additional content-type hashes are added.
 
                 # One of these isn't an ELF file, so call the generic method
                 if "elfhash" in self.attrs and "elfhash" in other.attrs:
@@ -535,12 +621,27 @@
                 """Generates the indices needed by the search dictionary.  See
                 generic.py for a more detailed explanation."""
 
-                return [
+                index_list = [
+                    # this entry shows the hash as the 'index', and the
+                    # file path as the 'value' when showing results when the
+                    # user has searched for the SHA-1 hash. This seems unusual,
+                    # but maintains the behaviour we had for S11.
                     ("file", "content", self.hash, self.hash),
+                    # This will result in a 2nd row of output when searching for
+                    # the SHA-1 hash, but is consistent with our behaviour for
+                    # the other hash attributes.
+                    ("file", "hash", self.hash, None),
                     ("file", "basename", os.path.basename(self.attrs["path"]),
                     None),
                     ("file", "path", os.path.sep + self.attrs["path"], None)
                 ]
+                for attr in digest.DEFAULT_HASH_ATTRS:
+                        # we already have an index entry for self.hash
+                        if attr == "hash":
+                                continue
+                        hash = self.attrs[attr]
+                        index_list.append(("file", attr, hash, None))
+                return index_list
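Editor's note: for a concrete picture of the rows generate_indices() now emits, consider a hypothetical file action carrying the positional SHA-1 hash plus one extra hash attribute (the names and digests below are invented for illustration):

    sha1 = "<sha1 hex digest>"      # the positional action hash
    sha256 = "<sha256 hex digest>"  # value of an assumed pkg.hash.sha256 attr
    attrs = {"path": "usr/bin/foo", "pkg.hash.sha256": sha256}

    index_list = [
        ("file", "content", sha1, sha1),  # legacy row: SHA-1 as index and value
        ("file", "hash", sha1, None),     # second SHA-1 row, new-style
        ("file", "basename", "foo", None),
        ("file", "path", "/usr/bin/foo", None),
        # plus one row per non-"hash" entry in DEFAULT_HASH_ATTRS:
        ("file", "pkg.hash.sha256", sha256, None),
    ]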
 
         def save_file(self, image, full_path):
                 """Save a file for later installation (in same process
--- a/src/modules/actions/generic.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/actions/generic.py	Fri Oct 11 16:09:34 2013 +0100
@@ -43,6 +43,7 @@
 import _common
 import pkg.actions
 import pkg.client.api_errors as apx
+import pkg.digest as digest
 import pkg.portable as portable
 import pkg.variant as variant
 
@@ -255,7 +256,9 @@
         def __str__(self):
                 """Serialize the action into manifest form.
 
-                The form is the name, followed by the hash, if it exists,
+                The form is the name, followed by the SHA-1 hash, if it exists
+                (this use of a positional SHA-1 hash is deprecated; the
+                pkg.*hash.* attributes are preferred over positional hashes),
                 followed by attributes in the form 'key=value'.  All fields are
                 space-separated; fields with spaces in the values are quoted.
 
@@ -427,9 +430,14 @@
                 if cmp_hash:
                         shash = ohash = None
                         try:
-                                shash = self.hash
-                                ohash = other.hash
-                                if shash != other.hash:
+                                attr, shash, ohash, hfunc = \
+                                    digest.get_common_preferred_hash(
+                                    self, other)
+                                if shash != ohash:
+                                        return True
+                                # If there's no common preferred hash, we have
+                                # to treat these actions as different
+                                if shash is None and ohash is None:
                                         return True
                         except AttributeError:
                                 if shash or ohash:
@@ -477,6 +485,8 @@
                 desired user output is.
                 """
 
+                # Indexing based on the SHA-1 hash is enough for the generic
+                # case.
                 if hasattr(self, "hash"):
                         return [
                             (self.name, "content", self.hash, self.hash),
--- a/src/modules/actions/license.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/actions/license.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 """module describing a license packaging object
@@ -36,6 +36,7 @@
 from stat import S_IWRITE, S_IREAD
 
 import generic
+import pkg.digest as digest
 import pkg.misc as misc
 import pkg.portable as portable
 import urllib
@@ -95,7 +96,10 @@
 
                 lfile = file(path, "wb")
                 try:
-                        shasum = misc.gunzip_from_stream(stream, lfile)
+                        hash_attr, hash_val, hash_func = \
+                            digest.get_preferred_hash(self)
+                        shasum = misc.gunzip_from_stream(stream, lfile,
+                            hash_func=hash_func)
                 except zlib.error, e:
                         raise ActionExecutionError(self, details=_("Error "
                             "decompressing payload: %s") %
@@ -104,12 +108,12 @@
                         lfile.close()
                         stream.close()
 
-                if shasum != self.hash:
+                if shasum != hash_val:
                         raise ActionExecutionError(self, details=_("Action "
                             "data hash verification failure: expected: "
                             "%(expected)s computed: %(actual)s action: "
                             "%(action)s") % {
-                                "expected": self.hash,
+                                "expected": hash_val,
                                 "actual": shasum,
                                 "action": self
                             })
@@ -138,9 +142,12 @@
                 path = os.path.join(img.get_license_dir(pfmri),
                     "license." + urllib.quote(self.attrs["license"], ""))
 
+                hash_attr, hash_val, hash_func = \
+                    digest.get_preferred_hash(self)
                 if args["forever"] == True:
                         try:
-                                chash, cdata = misc.get_data_digest(path)
+                                chash, cdata = misc.get_data_digest(path,
+                                    hash_func=hash_func)
                         except EnvironmentError, e:
                                 if e.errno == errno.ENOENT:
                                         errors.append(_("License file %s does "
@@ -148,10 +155,10 @@
                                         return errors, warnings, info
                                 raise
 
-                        if chash != self.hash:
+                        if chash != hash_val:
                                 errors.append(_("Hash: '%(found)s' should be "
                                     "'%(expected)s'") % { "found": chash,
-                                    "expected": self.hash})
+                                    "expected": hash_val})
                 return errors, warnings, info
 
         def remove(self, pkgplan):
@@ -174,8 +181,14 @@
                 indices = [("license", idx, self.attrs[idx], None)
                            for idx in self.reverse_indices]
                 if hasattr(self, "hash"):
+                        indices.append(("license", "hash", self.hash, None))
                         indices.append(("license", "content", self.hash, None))
-
+                for attr in digest.DEFAULT_HASH_ATTRS:
+                        # we already have an index entry for self.hash
+                        if attr == "hash":
+                                continue
+                        hash = self.attrs[attr]
+                        indices.append(("license", attr, hash, None))
                 return indices
 
         def get_text(self, img, pfmri, alt_pub=None):
@@ -189,12 +202,15 @@
                 """
 
                 path = self.get_local_path(img, pfmri)
+                hash_attr, hash_attr_val, hash_func = \
+                    digest.get_least_preferred_hash(self)
                 try:
                         with open(path, "rb") as fh:
                                 length = os.stat(path).st_size
                                 chash, txt = misc.get_data_digest(fh,
-                                    length=length, return_content=True)
-                                if chash == self.hash:
+                                    length=length, return_content=True,
+                                    hash_func=hash_func)
+                                if chash == hash_attr_val:
                                         return txt
                 except EnvironmentError, e:
                         if e.errno != errno.ENOENT:
@@ -206,8 +222,8 @@
                         if not alt_pub:
                                 alt_pub = img.get_publisher(pfmri.publisher)
                         assert pfmri.publisher == alt_pub.prefix
-                        return img.transport.get_content(alt_pub, self.hash,
-                            fmri=pfmri)
+                        return img.transport.get_content(alt_pub, hash_attr_val,
+                            fmri=pfmri, hash_func=hash_func)
                 finally:
                         img.cleanup_downloads()
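Editor's note: get_text() above deliberately asks for the *least* preferred hash; reading the diff, that value (today the positional SHA-1) is what identifies the payload in existing repositories and caches, so retrieval stays compatible with servers that only understand the oldest algorithm. A sketch of the selection under the same assumed ranking:

    import hashlib

    # Most-preferred first; attribute names assumed for illustration.
    RANKED_HASH_ATTRS = (("pkg.hash.sha256", hashlib.sha256),
        ("hash", hashlib.sha1))

    def get_least_preferred_hash(attrs):
            """Return (attr, value, hash_func) for the least-preferred hash
            the action carries, since that value names the payload on the
            server (a reading of the diff; the real logic is in pkg.digest)."""
            for attr, func in reversed(RANKED_HASH_ATTRS):
                    if attr in attrs:
                            return attr, attrs[attr], func
            return None, None, None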
 
--- a/src/modules/actions/signature.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/actions/signature.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import os
@@ -31,6 +31,7 @@
 import generic
 import pkg.actions
 import pkg.client.api_errors as apx
+import pkg.digest as digest
 import pkg.misc as misc
 import M2Crypto as m2
 
@@ -90,10 +91,20 @@
                 """
 
                 self.chain_cert_openers = []
-                hshes = []
-                sizes = []
-                chshes = []
-                csizes = []
+
+                # chain_hshes and chain_chshes are dictionaries which map a
+                # given hash or compressed hash attribute to a list of the hash
+                # values for each path in chain_certs.
+                chain_hshes = {}
+                chain_chshes = {}
+                chain_csizes = []
+                chain_sizes = []
+
+                for attr in digest.DEFAULT_CHAIN_ATTRS:
+                        chain_hshes[attr] = []
+                for attr in digest.DEFAULT_CHAIN_CHASH_ATTRS:
+                        chain_chshes[attr] = []
+
                 for pth in chain_certs:
                         if not os.path.exists(pth):
                                 raise pkg.actions.ActionDataError(
@@ -104,29 +115,52 @@
                         file_opener = self.make_opener(pth)
                         self.chain_cert_openers.append(file_opener)
                         self.attrs.setdefault("chain.sizes", [])
+                        self.attrs.setdefault("chain.csizes", [])
+
                         try:
                                 fs = os.stat(pth)
-                                sizes.append(str(fs.st_size))
+                                chain_sizes.append(str(fs.st_size))
                         except EnvironmentError, e:
                                 raise pkg.actions.ActionDataError(e, path=pth)
                         # misc.get_data_digest takes care of closing the file
                         # that's opened below.
                         with file_opener() as fh:
-                                hsh, data = misc.get_data_digest(fh,
-                                    length=fs.st_size, return_content=True)
-                        hshes.append(hsh)
-                        csize, chash = misc.compute_compressed_attrs(hsh,
-                            None, data, fs.st_size, chash_dir)
-                        csizes.append(csize)
-                        chshes.append(chash.hexdigest())
-                if hshes:
+                                hshes, data = misc.get_data_digest(fh,
+                                    length=fs.st_size, return_content=True,
+                                    hash_attrs=digest.DEFAULT_CHAIN_ATTRS,
+                                    hash_algs=digest.CHAIN_ALGS)
+
+                        for attr in hshes:
+                                chain_hshes[attr].append(hshes[attr])
+
+                        # We need a filename to use for the uncompressed chain
+                        # cert, so get the preferred chain hash value from the
+                        # chain_hshes
+                        chain_val = None
+                        for attr in digest.RANKED_CHAIN_ATTRS:
+                                if not chain_val and attr in hshes:
+                                        chain_val = hshes[attr]
+
+                        csize, chashes = misc.compute_compressed_attrs(
+                            chain_val, None, data, fs.st_size, chash_dir,
+                            chash_attrs=digest.DEFAULT_CHAIN_CHASH_ATTRS,
+                            chash_algs=digest.CHAIN_CHASH_ALGS)
+
+                        chain_csizes.append(csize)
+                        for attr in chashes:
+                                chain_chshes[attr].append(
+                                    chashes[attr].hexdigest())
+                if chain_hshes:
                         # These attributes are stored as a single value with
                         # spaces in it rather than multiple values to ensure
                         # the ordering remains consistent.
-                        self.attrs["chain.sizes"] = " ".join(sizes)
-                        self.attrs["chain"] = " ".join(hshes)
-                        self.attrs["chain.chashes"] = " ".join(chshes)
-                        self.attrs["chain.csizes"] = " ".join(csizes)
+                        self.attrs["chain.sizes"] = " ".join(chain_sizes)
+                        self.attrs["chain.csizes"] = " ".join(chain_csizes)
+
+                        for attr in digest.DEFAULT_CHAIN_ATTRS:
+                                self.attrs[attr] = " ".join(chain_hshes[attr])
+                        for attr in digest.DEFAULT_CHAIN_CHASH_ATTRS:
+                                self.attrs[attr] = " ".join(chain_chshes[attr])
 
         def get_size(self):
                 res = generic.Action.get_size(self)
@@ -141,6 +175,9 @@
                 return res
 
         def get_chain_csize(self, chain):
+                # The length of 'chain' is also going to be the length
+                # of pkg.chain.<hash alg>, so there's no need to look for
+                # other hash attributes here.
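+                # For example (hypothetical values): with chain="aa bb" and
+                # chain.csizes="100 200", looking up chain cert "bb" pairs it
+                # with csize "200".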
                 for c, s in zip(self.attrs.get("chain", "").split(),
                     self.attrs.get("chain.csizes", "").split()):
                         if c == chain:
@@ -187,39 +224,76 @@
                         size = int(self.attrs.get("pkg.size", 0))
                         tmp_dir = tempfile.mkdtemp()
                         with self.data() as fh:
-                                tmp_a.hash, data = misc.get_data_digest(fh,
-                                    size, return_content=True)
-                        csize, chash = misc.compute_compressed_attrs(
+                                hashes, data = misc.get_data_digest(fh,
+                                    size, return_content=True,
+                                    hash_attrs=digest.DEFAULT_HASH_ATTRS,
+                                    hash_algs=digest.HASH_ALGS)
+                                tmp_a.attrs.update(hashes)
+                                # "hash" is special since it shouldn't appear in
+                                # the action attributes, it gets set as a member
+                                # instead.
+                                if "hash" in tmp_a.attrs:
+                                        tmp_a.hash = tmp_a.attrs["hash"]
+                                        del tmp_a.attrs["hash"]
+
+                        # The use of self.hash here is just to point to a
+                        # filename; the type of hash used for self.hash is
+                        # irrelevant. Note that our use of self.hash for the
+                        # basename will need to be modified when we finally move
+                        # off SHA-1 hashes.
+                        csize, chashes = misc.compute_compressed_attrs(
                             os.path.basename(self.hash), self.hash, data, size,
                             tmp_dir)
                         shutil.rmtree(tmp_dir)
                         tmp_a.attrs["pkg.csize"] = csize
-                        tmp_a.attrs["chash"] = chash.hexdigest()
+                        for attr in chashes:
+                                tmp_a.attrs[attr] = chashes[attr].hexdigest()
                 elif self.hash:
                         tmp_a.hash = self.hash
+                        for attr in digest.DEFAULT_HASH_ATTRS:
+                                if attr in self.attrs:
+                                        tmp_a.attrs[attr] = self.attrs[attr]
 
-                hashes = []
                 csizes = []
-                chashes = []
+                chain_hashes = {}
+                chain_chashes = {}
+                for attr in digest.DEFAULT_CHAIN_ATTRS:
+                        chain_hashes[attr] = []
+                for attr in digest.DEFAULT_CHAIN_CHASH_ATTRS:
+                        chain_chashes[attr] = []
+
                 sizes = self.attrs.get("chain.sizes", "").split()
                 for i, c in enumerate(self.chain_cert_openers):
                         size = int(sizes[i])
                         tmp_dir = tempfile.mkdtemp()
-                        hsh, data = misc.get_data_digest(c(), size,
-                            return_content=True)
-                        hashes.append(hsh)
-                        csize, chash = misc.compute_compressed_attrs("tmp",
-                            None, data, size, tmp_dir)
+                        hshes, data = misc.get_data_digest(c(), size,
+                            return_content=True,
+                            hash_attrs=digest.DEFAULT_CHAIN_ATTRS,
+                            hash_algs=digest.CHAIN_ALGS)
+
+                        for attr in hshes:
+                                chain_hashes[attr].append(hshes[attr])
+
+                        csize, chashes = misc.compute_compressed_attrs("tmp",
+                            None, data, size, tmp_dir,
+                            chash_attrs=digest.DEFAULT_CHAIN_CHASH_ATTRS,
+                            chash_algs=digest.CHAIN_CHASH_ALGS)
                         shutil.rmtree(tmp_dir)
                         csizes.append(csize)
-                        chashes.append(chash.hexdigest())
-                if hashes:
-                        tmp_a.attrs["chain"] = " ".join(hashes)
+                        for attr in chashes:
+                                chain_chashes[attr].append(
+                                    chashes[attr].hexdigest())
+
+                if chain_hashes:
+                        for attr in digest.DEFAULT_CHAIN_ATTRS:
+                                if chain_hashes[attr]:
+                                        tmp_a.attrs[attr] = " ".join(
+                                            chain_hashes[attr])
 
                 # Now that tmp_a looks like the post-published action, transform
                 # it into a string using the generic sig_str method.
                 return generic.Action.sig_str(tmp_a, tmp_a, version)
- 
+
         def actions_to_str(self, acts, version):
                 """Transforms a collection of actions into a string that is
                 used to sign those actions."""
@@ -235,18 +309,50 @@
                 """Retrieve the chain certificates needed to validate this
                 signature."""
 
-                for c in self.attrs.get("chain", "").split():
-                        pub.get_cert_by_hash(c, only_retrieve=True)
+                chain_attr, chain_val, hash_func = \
+                    digest.get_least_preferred_hash(self,
+                    hash_type=digest.CHAIN)
+                # We may not have any chain certs for this signature
+                if not chain_val:
+                        return
+                for c in chain_val.split():
+                        pub.get_cert_by_hash(c, only_retrieve=True,
+                            hash_func=hash_func)
 
-        def get_chain_certs(self):
+        def get_chain_certs(self, least_preferred=False):
+                """Return a list of the chain certificates needed to validate
+                this signature. When retrieving the content from the
+                repository, we use the "least preferred" hash for backwards
+                compatibility, but when verifying the content, we use the
+                "most preferred" hash."""
+
+                if least_preferred:
+                        chain_attr, chain_val, hash_func = \
+                            digest.get_least_preferred_hash(self,
+                            hash_type=digest.CHAIN)
+                else:
+                        chain_attr, chain_val, hash_func = \
+                            digest.get_preferred_hash(self,
+                            hash_type=digest.CHAIN)
+                if not chain_val:
+                        return []
+                return chain_val.split()
+
+        def get_chain_certs_chashes(self, least_preferred=False):
                 """Return a list of the chain certificates needed to validate
                 this signature."""
-                return self.attrs.get("chain", "").split()
 
-        def get_chain_certs_chashes(self):
-                """Return a list of the chain certificates needed to validate
-                this signature."""
-                return self.attrs.get("chain.chashes", "").split()
+                if least_preferred:
+                        chain_chash_attr, chain_chash_val, hash_func = \
+                            digest.get_least_preferred_hash(self,
+                            hash_type=digest.CHAIN_CHASH)
+                else:
+                        chain_chash_attr, chain_chash_val, hash_func = \
+                            digest.get_preferred_hash(self,
+                            hash_type=digest.CHAIN_CHASH)
+                if not chain_chash_val:
+                        return []
+                return chain_chash_val.split()
 
         def is_signed(self):
                 """Returns True if this action is signed using a key, instead
@@ -314,14 +420,17 @@
                             computed_hash:
                                 raise apx.UnverifiedSignature(self,
                                     _("The signature value did not match the "
-                                    "expected value. action:%s") % self)
+                                    "expected value. action: %s") % self)
                         return True
                 # Verify a signature that's not just a hash.
                 if self.sig_alg is None:
                         return None
                 # Get the certificate paired with the key which signed this
                 # action.
-                cert = pub.get_cert_by_hash(self.hash, verify_hash=True)
+                attr, hash_val, hash_func = \
+                    digest.get_least_preferred_hash(self)
+                cert = pub.get_cert_by_hash(hash_val, verify_hash=True,
+                    hash_func=hash_func)
                 # Make sure that the intermediate certificates that are needed
                 # to validate this signature are present.
                 self.retrieve_chain_certs(pub)
@@ -418,6 +527,12 @@
                     self.attrs["algorithm"], self.attrs["algorithm"]))
                 res.append((self.name, "signature", self.attrs["value"],
                     self.attrs["value"]))
+                for attr in digest.DEFAULT_HASH_ATTRS:
+                        # we already have an index entry for self.hash
+                        if attr == "hash":
+                                continue
+                        hash = self.attrs[attr]
+                        res.append((self.name, attr, hash, None))
                 return res
 
         def identical(self, other, hsh):
@@ -429,7 +544,25 @@
                         return False
                 # If the code signing certs are identical, then more checking
                 # is needed.
-                if hsh == other.hash or self.hash == other.hash:
+                # Determine if we share any hash attribute values with the other
+                # action.
+                matching_hash_attrs = set()
+                for attr in digest.DEFAULT_HASH_ATTRS:
+                        if attr == "hash":
+                                # we deal with the 'hash' member later
+                                continue
+                        if attr in self.attrs and attr in other.attrs and \
+                            self.attrs[attr] == other.attrs[attr] and \
+                            self.attrs[attr]:
+                                    matching_hash_attrs.add(attr)
+                        if hsh and hsh == other.attrs.get(attr):
+                                # Technically 'hsh' isn't a hash attr, it's
+                                # a hash attr value, but that's enough for us
+                                # to consider it as potentially identical.
+                                matching_hash_attrs.add(hsh)
+
+                if hsh == other.hash or self.hash == other.hash or \
+                    matching_hash_attrs:
                         # If the algorithms are using different algorithms or
                         # have different versions, then they're not identical.
                         if self.attrs["algorithm"]  != \
--- a/src/modules/catalog.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/catalog.py	Fri Oct 11 16:09:34 2013 +0100
@@ -19,7 +19,7 @@
 #
 # CDDL HEADER END
 #
-# Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
 
 """Interfaces and implementation for the Catalog object, as well as functions
 that operate on lists of package FMRIs."""
@@ -64,6 +64,13 @@
                 # Default to a 32K buffer.
                 self.__bufsz = 32 * 1024
 
+                # catalog signatures *must* use sha-1 only since clients
+                # compare entire dictionaries against the reported hash from
+                # the catalog in the various <CatalogPartBase>.validate()
+                # methods rather than just attributes within those dictionaries.
+                # If old clients are to interoperate with new repositories, the
+                # computed and expected dictionaries must be identical at
+                # present, so we must use sha-1.
                 if sign:
                         if not pathname:
                                 # Only needed if not writing to __fileobj.
@@ -158,7 +165,8 @@
                 # Calculating sha-1 this way is much faster than intercepting
                 # write calls because of the excessive number of write calls
                 # that json.dump() triggers (1M+ for /dev catalog files).
-                self.__sha_1_value = misc.get_data_digest(self.pathname)[0]
+                self.__sha_1_value = misc.get_data_digest(self.pathname,
+                    hash_func=hashlib.sha1)[0]
 
                 # Open the JSON file so that the signature data can be added.
                 sfile = file(self.pathname, "rb+", self.__bufsz)
--- a/src/modules/client/api.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/client/api.py	Fri Oct 11 16:09:34 2013 +0100
@@ -5336,8 +5336,11 @@
                         return_type = query_p.Query.RETURN_ACTIONS
                 else:
                         return_type = query_p.Query.RETURN_PACKAGES
-                query_p.Query.__init__(self, text, case_sensitive, return_type,
-                    num_to_return, start_point)
+                try:
+                        query_p.Query.__init__(self, text, case_sensitive,
+                            return_type, num_to_return, start_point)
+                except query_p.QueryLengthExceeded, e:
+                        raise apx.ParseError(e)
 
 
 def get_default_image_root(orig_cwd=None):
--- a/src/modules/client/api_errors.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/client/api_errors.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2008, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import errno
@@ -2020,7 +2020,7 @@
                                     "found in %(pfmri)s and has a hash of "
                                     "%(hsh)s") % \
                                     {"pfmri": self.pfmri, "hsh": self.sig.hash}
-                        return _("The package involved is:%s") % self.pfmri
+                        return _("The package involved is %s") % self.pfmri
                 if self.sig:
                         return _("The relevant signature action's value "
                             "attribute is %s") % self.sig.attrs["value"]
@@ -2097,8 +2097,8 @@
                         s = _("The following problems were encountered:\n") + \
                         "\n".join([str(e) for e in self.ext_exs])
                 return _("The certificate which issued this "
-                    "certificate:%(subj)s could not be found. The issuer "
-                    "is:%(issuer)s\n") % {"subj":self.cert.get_subject(),
+                    "certificate: %(subj)s could not be found. The issuer "
+                    "is: %(issuer)s\n") % {"subj":self.cert.get_subject(),
                     "issuer":self.cert.get_issuer()} + s + \
                     CertificateException.__str__(self)
 
--- a/src/modules/client/image.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/client/image.py	Fri Oct 11 16:09:34 2013 +0100
@@ -31,6 +31,7 @@
 import copy
 import datetime
 import errno
+import hashlib
 import os
 import platform
 import shutil
@@ -596,8 +597,12 @@
 
                         try:
                                 # Destination name is based on digest of file.
+                                # In order for this image to interoperate with
+                                # older and newer clients, we must use sha-1
+                                # here.
                                 dest = os.path.join(ssl_dir,
-                                    misc.get_data_digest(src)[0])
+                                    misc.get_data_digest(src,
+                                        hash_func=hashlib.sha1)[0])
                                 if src != dest:
                                         portable.copyfile(src, dest)
 
--- a/src/modules/client/imageplan.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/client/imageplan.py	Fri Oct 11 16:09:34 2013 +0100
@@ -50,6 +50,7 @@
 import pkg.client.pkgdefs as pkgdefs
 import pkg.client.pkgplan as pkgplan
 import pkg.client.plandesc as plandesc
+import pkg.digest as digest
 import pkg.fmri
 import pkg.manifest as manifest
 import pkg.misc as misc
@@ -2586,8 +2587,10 @@
                 """Retrieve text for release note from repo"""
                 try:
                         pub = self.image.get_publisher(pfmri.publisher)
-                        return self.image.transport.get_content(pub, act.hash,
-                            fmri=pfmri)
+                        hash_attr, hash_val, hash_func = \
+                            digest.get_least_preferred_hash(act)
+                        return self.image.transport.get_content(pub, hash_val,
+                            fmri=pfmri, hash_func=hash_func)
                 finally:
                         self.image.cleanup_downloads()
 
--- a/src/modules/client/publisher.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/client/publisher.py	Fri Oct 11 16:09:34 2013 +0100
@@ -58,6 +58,7 @@
 import pkg.client.api_errors as api_errors
 import pkg.client.sigpolicy as sigpolicy
 import pkg.client.pkgdefs as pkgdefs
+import pkg.digest as digest
 import pkg.misc as misc
 import pkg.portable as portable
 import pkg.server.catalog as old_catalog
@@ -1622,7 +1623,8 @@
                 if not os.path.exists(self.__origin_root):
                         return
                 # A digest of the URI string is used here to attempt to avoid
-                # path length problems.
+                # path length problems. In order for this image to interoperate
+                # with older clients, we must use sha-1 here.
                 return os.path.join(self.__origin_root,
                     hashlib.sha1(origin.uri).hexdigest())
 
@@ -1637,6 +1639,8 @@
                 on catalog from each origin."""
 
                 # First, remove catalogs for any origins that no longer exist.
+                # We must interoperate with older clients, so force the use of
+                # sha-1 here.
                 ohashes = [
                     hashlib.sha1(o.uri).hexdigest()
                     for o in self.repository.origins
@@ -2355,6 +2359,8 @@
 
         @staticmethod
         def __hash_cert(c):
+                # In order to interoperate with older images, we must use SHA-1
+                # here.
                 return hashlib.sha1(c.as_pem()).hexdigest()
 
         @staticmethod
@@ -2412,7 +2418,7 @@
                 return pkg_hash
 
         def get_cert_by_hash(self, pkg_hash, verify_hash=False,
-            only_retrieve=False):
+            only_retrieve=False, hash_func=digest.DEFAULT_HASH_FUNC):
                 """Given a pkg5 hash, retrieve the cert that's associated with
                 it.
 
@@ -2435,7 +2441,8 @@
                         with open(pth, "rb") as fh:
                                 s = fh.read()
                 else:
-                        s = self.transport.get_content(self, pkg_hash)
+                        s = self.transport.get_content(self, pkg_hash,
+                            hash_func=hash_func)
                 c = self.__string_to_cert(s, pkg_hash)
                 if not pth_exists:
                         try:
@@ -2447,7 +2454,7 @@
 
                 if verify_hash:
                         h = misc.get_data_digest(cStringIO.StringIO(s),
-                            length=len(s))[0]
+                            length=len(s), hash_func=hash_func)[0]
                         if h != pkg_hash:
                                 raise api_errors.ModifiedCertificateException(c,
                                     pth)
--- a/src/modules/client/query_parser.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/client/query_parser.py	Fri Oct 11 16:09:34 2013 +0100
@@ -19,7 +19,7 @@
 #
 # CDDL HEADER END
 #
-# Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
 
 import os
 import sys
@@ -32,7 +32,7 @@
 from pkg.choose import choose
 
 import pkg.query_parser as qp
-from pkg.query_parser import BooleanQueryException, ParseError
+from pkg.query_parser import BooleanQueryException, ParseError, QueryLengthExceeded
 import itertools
 
 class QueryLexer(qp.QueryLexer):
--- a/src/modules/client/transport/transport.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/client/transport/transport.py	Fri Oct 11 16:09:34 2013 +0100
@@ -44,6 +44,7 @@
 import pkg.client.transport.repo as trepo
 import pkg.client.transport.stats as tstats
 import pkg.client.progress as progress
+import pkg.digest as digest
 import pkg.file_layout.file_manager as fm
 import pkg.fmri
 import pkg.manifest as manifest
@@ -1094,7 +1095,8 @@
                 raise failures
 
         @LockedTransport()
-        def get_content(self, pub, fhash, fmri=None, ccancel=None):
+        def get_content(self, pub, fhash, fmri=None, ccancel=None,
+            hash_func=None):
                 """Given a fhash, return the uncompressed content content from
                 the remote object.  This is similar to get_datastream, except
                 that the transport handles retrieving and decompressing the
@@ -1102,6 +1104,8 @@
 
                 'fmri' If the fhash corresponds to a known package, the fmri
                 should be specified for optimal transport performance.
+
+                'hash_func' is the hash function that was used to compute fhash.
                 """
 
                 retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
@@ -1141,7 +1145,8 @@
                                 resp = d.get_datastream(fhash, v, header,
                                     ccancel=ccancel, pub=pub)
                                 s = cStringIO.StringIO()
-                                hash_val = misc.gunzip_from_stream(resp, s)
+                                hash_val = misc.gunzip_from_stream(resp, s,
+                                    hash_func=hash_func)
 
                                 if hash_val != fhash:
                                         exc = tx.InvalidContentException(
@@ -2415,11 +2420,12 @@
                 check if this action is cached.  This is used for actions which
                 have more than one effective payload."""
 
-                hashval = action.hash
+                hash_attr, hash_val, hash_func = \
+                    digest.get_least_preferred_hash(action)
                 if in_hash:
-                        hashval = in_hash
+                        hash_val = in_hash
                 for cache in self.cfg.get_caches(pub=pub, readonly=True):
-                        cache_path = cache.lookup(hashval)
+                        cache_path = cache.lookup(hash_val)
                         if not cache_path:
                                 continue
                         try:
@@ -2455,20 +2461,40 @@
                 return self._make_opener(self._action_cached(action, pub,
                     verify=False))
 
-        @staticmethod
-        def _verify_content(action, filepath):
+        def _verify_content(self, action, filepath):
                 """If action contains an attribute that has the compressed
                 hash, read the file specified in filepath and verify
                 that the hash values match.  If the values do not match,
                 remove the file and raise an InvalidContentException."""
 
-                chash = action.attrs.get("chash", None)
+                chash_attr, chash, chash_func = digest.get_preferred_hash(
+                    action, hash_type=digest.CHASH)
                 if action.name == "signature":
+                        #
+                        # If we're checking a signature action and the filepath
+                        # parameter points to one of the chain certificates, we
+                        # need to verify against the most-preferred
+                        # [pkg.]chain.chash[.<alg>] attribute that corresponds
+                        # to the filepath we're looking at. We determine the
+                        # index of the least-preferred chain hash that matches
+                        # our filename, and use the most-preferred chash to
+                        # verify against.
+                        #
+                        # i.e. if we have attributes:
+                        # chain="a.a b.b c.c"
+                        # chain.chash="aa bb cc" \
+                        #   pkg.chain.chash.sha256="AA BB CC"
+                        #
+                        # and we're looking at file "b.b" then we must compare
+                        # our computed value against the "BB" chash.
+                        #
                         name = os.path.basename(filepath)
                         found = False
-                        assert len(action.get_chain_certs()) == \
+                        assert len(action.get_chain_certs(
+                            least_preferred=True)) == \
                             len(action.get_chain_certs_chashes())
-                        for n, c in zip(action.get_chain_certs(),
+                        for n, c in zip(
+                            action.get_chain_certs(least_preferred=True),
                             action.get_chain_certs_chashes()):
                                 if name == n:
                                         found = True
@@ -2482,7 +2508,11 @@
                         ofile = open(os.devnull, "wb")
 
                         try:
-                                fhash = misc.gunzip_from_stream(ifile, ofile)
+                                hash_attr, hash_val, hash_func = \
+                                    digest.get_preferred_hash(action,
+                                        hash_type=digest.HASH)
+                                fhash = misc.gunzip_from_stream(ifile, ofile,
+                                    hash_func=hash_func)
                         except zlib.error, e:
                                 s = os.stat(filepath)
                                 os.remove(filepath)
@@ -2494,19 +2524,32 @@
                         ifile.close()
                         ofile.close()
 
-                        if action.hash != fhash:
+                        if hash_val != fhash:
                                 s = os.stat(filepath)
                                 os.remove(filepath)
                                 raise tx.InvalidContentException(action.path,
                                     "hash failure:  expected: %s"
-                                    "computed: %s" % (action.hash, fhash),
+                                    "computed: %s" % (hash, fhash),
                                     size=s.st_size)
                         return
 
-                newhash = misc.get_data_digest(filepath)[0]
+                newhash = misc.get_data_digest(filepath,
+                    hash_func=chash_func)[0]
                 if chash != newhash:
                         s = os.stat(filepath)
-                        os.remove(filepath)
+                        # Check whether we're using the path as a part of the
+                        # content cache, or whether we're actually looking at a
+                        # file:// repository. It's safe to remove the corrupted
+                        # file only if it is part of a cache. Otherwise,
+                        # "pkgrepo verify/fix" should be used to check
+                        # repositories.
+                        cache_fms = self.cfg.get_caches(readonly=False)
+                        remove_content = False
+                        for fm in cache_fms:
+                                if filepath.startswith(fm.root):
+                                        remove_content = True
+                        if remove_content:
+                                os.remove(filepath)
                         raise tx.InvalidContentException(action.path,
                             "chash failure: expected: %s computed: %s" % \
                             (chash, newhash), size=s.st_size)
@@ -3026,11 +3069,12 @@
                                     filesz, cachehit=True)
                         return
 
-                hashval = action.hash
-
-                self.add_hash(hashval, action)
+                # only retrieve the least preferred hash for this action
+                hash_attr, hash_val, hash_func = \
+                    digest.get_least_preferred_hash(action)
+                self.add_hash(hash_val, action)
                 if action.name == "signature":
-                        for c in action.get_chain_certs():
+                        for c in action.get_chain_certs(least_preferred=True):
                                 self.add_hash(c, action)
 
         def add_hash(self, hashval, item):
@@ -3109,7 +3153,7 @@
             progtrack=None, ccancel=None, alt_repo=None):
                 """Supply the destination publisher in the pub argument.
                 The transport object should be passed in xport.
-                
+
                 'final_dir' indicates the directory the retrieved files should
                 be moved to after retrieval. If it is set to None, files will
                 not be moved and remain in the cache directory specified
@@ -3129,18 +3173,19 @@
 
                 cpath = self._transport._action_cached(action,
                     self.get_publisher())
-                hashval = action.hash
+                hash_attr, hash_val, hash_func = \
+                    digest.get_least_preferred_hash(action)
 
                 if cpath and self._final_dir:
-                        self._final_copy(hashval, cpath)
+                        self._final_copy(hash_val, cpath)
                         if self._progtrack:
                                 filesz = int(misc.get_pkg_otw_size(action))
                                 self._progtrack.download_add_progress(1, filesz,
                                     cachehit=True)
                 else:
-                        self.add_hash(hashval, action)
+                        self.add_hash(hash_val, action)
                 if action.name == "signature":
-                        for c in action.get_chain_certs():
+                        for c in action.get_chain_certs(least_preferred=True):
                                 cpath = self._transport._action_cached(action,
                                     self.get_publisher(), in_hash=c)
                                 if cpath and self._final_dir:
@@ -3234,7 +3279,7 @@
                 src = file(current_path, "rb")
                 outfile = os.fdopen(fd, "wb")
                 if self._decompress:
-                        misc.gunzip_from_stream(src, outfile)
+                        misc.gunzip_from_stream(src, outfile, ignore_hash=True)
                 else:
                         while True:
                                 buf = src.read(64 * 1024)
@@ -3279,7 +3324,7 @@
                 repo = publisher.Repository(origins=repouri_list)
 
         for origin in repo.origins:
-                if origin.scheme == "https": 
+                if origin.scheme == "https":
                         origin.ssl_key = ssl_key
                         origin.ssl_cert = ssl_cert
 
@@ -3316,7 +3361,7 @@
                 if p.repository:
                         for origin in p.repository.origins:
                                 if origin.scheme == \
-                                    pkg.client.publisher.SSL_SCHEMES: 
+                                    pkg.client.publisher.SSL_SCHEMES:
                                         origin.ssl_key = ssl_key
                                         origin.ssl_cert = ssl_cert
 
--- a/src/modules/depotcontroller.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/depotcontroller.py	Fri Oct 11 16:09:34 2013 +0100
@@ -19,7 +19,7 @@
 #
 # CDDL HEADER END
 #
-# Copyright (c) 2008, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import httplib
@@ -386,6 +386,9 @@
                 return args
 
-        def __initial_start(self):
+        def __initial_start(self, env_arg=None):
+                """'env_arg' can be a dictionary of additional os.environ
+                entries to use when starting the depot."""
+
                 if self.__state != self.HALTED:
                         raise DepotStateException("Depot already starting or "
                             "running")
@@ -416,6 +419,7 @@
                 self.__starttime = time.time()
 
-        def start(self):
+        def start(self, env_arg=None):
+
                 try:
-                        self.__initial_start()
+                        self.__initial_start(env_arg=env_arg)
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/digest.py	Fri Oct 11 16:09:34 2013 +0100
@@ -0,0 +1,362 @@
+#!/usr/bin/python2.6
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+#
+
+import hashlib
+
+# When running the test suite, we alter our behaviour depending on certain
+# debug flags.
+from pkg.client.debugvalues import DebugValues
+
+# pkg(5) uses cryptographic hash functions for a number of tasks. We define the
+# default hash function, along with the hash name here. Note that the use of
+# hashes in package metadata is *not* governed by this value, since multiple
+# hashes are supported for payload-bearing actions in a package.
+#
+# Some uses of hashes are image-format specific, and may require image version
+# increments, in which case the required algorithm is hardcoded where it is
+# used, along with an appropriate comment.
+#
+# Other uses are essentially volatile, and the hash used has no persistence
+# (e.g. saving the hash to a file in a temporary directory, when the hash gets
+# regenerated on service restart). For those volatile uses, DEFAULT_HASH_FUNC is
+# recommended.
+DEFAULT_HASH_FUNC = hashlib.sha1
+DEFAULT_HASH_NAME = "sha-1"
+
+# DEFAULT_XXX_ATTRS are the attributes added to actions by the packaging system
+# at publication time.
+#
+# Notably, the hashes we add to an action at publication *do not* need to
+# correspond to the hashes we may use to verify action payload during install or
+# update, allowing an upgrade path where we could choose to drop publication
+# support for a certain hash algorithm, but still retain the ability to install
+# actions using that hash.
+#
+# The order of these lists of attributes is significant only to the
+# extent that the repository code will store the file in the repository using
+# the first hash value in the list when using the *old* publication model (i.e.
+# a transaction, with multiple add_file(..) methods to add content).
+#
+# Otherwise, when publishing, we always store files in the repository
+# using the "least preferred" hash for maximum backwards compatibility with
+# older packaging tools that expect to be able to find those hashes in the
+# repository, but do add additional hashes to the action metadata.
+#
+# When using the transport to download content from a repository, we use the
+# least preferred hash for file retrieval, but verify the installed content
+# using the "most preferred" hash. See get_preferred_hash(..),
+# get_least_preferred_hash(..) and get_common_preferred_hash(..)
+#
+if DebugValues["hash"] == "sha1+sha256":
+        # Simulate pkg(5) where SHA-1 and SHA-256 are used for publication
+        DEFAULT_HASH_ATTRS = ["hash", "pkg.hash.sha256"]
+        DEFAULT_CHASH_ATTRS = ["chash", "pkg.chash.sha256"]
+        DEFAULT_CONTENT_HASH_ATTRS = ["elfhash", "pkg.content-hash.sha256"]
+        DEFAULT_CHAIN_ATTRS = ["chain", "pkg.chain.sha256"]
+        DEFAULT_CHAIN_CHASH_ATTRS = ["chain.chashes",
+            "pkg.chain.chashes.sha256"]
+
+elif DebugValues["hash"] == "sha256":
+        # Simulate pkg(5) where SHA-1 is no longer used for publication
+        DEFAULT_HASH_ATTRS = ["pkg.hash.sha256"]
+        DEFAULT_CHASH_ATTRS = ["pkg.chash.sha256"]
+        DEFAULT_CONTENT_HASH_ATTRS = ["pkg.content-hash.sha256"]
+        DEFAULT_CHAIN_ATTRS = ["pkg.chain.sha256"]
+        DEFAULT_CHAIN_CHASH_ATTRS = ["pkg.chain.chashes.sha256"]
+
+else:
+        # The current default is to add just a single hash value for each hash
+        # type
+        DEFAULT_HASH_ATTRS = ["hash"]
+        DEFAULT_CHASH_ATTRS = ["chash"]
+        # 'elfhash' was the only content-hash attribute originally supported
+        DEFAULT_CONTENT_HASH_ATTRS = ["elfhash"]
+        DEFAULT_CHAIN_ATTRS = ["chain"]
+        DEFAULT_CHAIN_CHASH_ATTRS = ["chain.chashes"]
+
+# The types of hashes we compute or consult for actions.
+HASH = 0
+CHASH = 1
+CONTENT_HASH = 2
+CHAIN = 3
+CHAIN_CHASH = 4
+
+# In the dictionaries below, we map the action attributes to the name of the
+# class or factory-method that returns an object used to compute that attribute.
+# The class or factory-method takes a 0-parameter constructor to return an
+# object which must have an 'update(data)' method, used to update the hash
+# value being computed with this data, along with a 'hexdigest()' method to
+# return the hexadecimal value of the hash.
+#
+# At present, these are all hashlib factory methods. When maintaining these
+# dictionaries, it is important to *never remove* entries from them, otherwise
+# clients with installed packages will not be able to verify their content when
+# pkg(5) is updated.
+
+# A dictionary of the pkg(5) hash attributes we know about.
+if DebugValues["hash"] == "sha1":
+        # Simulate older non-SHA2 aware pkg(5) code
+        HASH_ALGS = {"hash": hashlib.sha1}
+else:
+        HASH_ALGS = {
+            "hash":            hashlib.sha1,
+            "pkg.hash.sha256": hashlib.sha256,
+        }
+
+# A dictionary of the compressed hash attributes we know about.
+CHASH_ALGS = {}
+for key in HASH_ALGS:
+        CHASH_ALGS[key.replace("hash", "chash")] = HASH_ALGS[key]
+
+# A dictionary of the content-hash attributes we know about.
+# For now, ELF files are the only ones which have a specific content-hash
+# attribute.
+CONTENT_HASH_ALGS = {}
+for key in HASH_ALGS:
+        if key == "hash":
+                CONTENT_HASH_ALGS["elfhash"] = HASH_ALGS[key]
+        else:
+                CONTENT_HASH_ALGS[key.replace("hash", "content-hash")] = \
+                    HASH_ALGS[key]
+
+# A dictionary of signature action chain hash attributes we know about.
+CHAIN_ALGS = {}
+for key in HASH_ALGS:
+        CHAIN_ALGS[key.replace("hash", "chain")] = HASH_ALGS[key]
+
+# A dictionary of signature action chain chash attributes we know about.
+CHAIN_CHASH_ALGS = {}
+for key in HASH_ALGS:
+        CHAIN_CHASH_ALGS[key.replace("hash", "chain.chashes")] = HASH_ALGS[key]
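+
+# For example, the "pkg.hash.sha256" key above yields the derived keys
+# "pkg.chash.sha256", "pkg.content-hash.sha256", "pkg.chain.sha256" and
+# "pkg.chain.chashes.sha256", all mapping to hashlib.sha256.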
+
+
+# Ordered lists of "most preferred" hash algorithm to "least preferred"
+# algorithm for each hash attribute we use. It's important to *never remove*
+# items from this list, otherwise we would strand clients installed with
+# packages using hashes that correspond to that item. Instead promote/demote the
+# hash algorithm so that better hashes are used for new packages.
+# 'hash' is a dummy attribute name, since it really references the action.hash
+# member.
+#
+if DebugValues["hash"] == "sha1":
+        RANKED_HASH_ATTRS = ("hash")
+elif DebugValues["hash"] == "sha2":
+        RANKED_HASH_ATTRS = ("pkg.hash.sha256")
+else:
+        RANKED_HASH_ATTRS = (
+            "pkg.hash.sha256",
+            "hash",
+        )
+
+RANKED_CHASH_ATTRS = tuple(key.replace("hash", "chash")
+    for key in RANKED_HASH_ATTRS)
+_content_hash_attrs = []
+for key in RANKED_HASH_ATTRS:
+        if key == "hash":
+                _content_hash_attrs.append("elfhash")
+        else:
+                _content_hash_attrs.append(key.replace("hash", "content-hash"))
+
+RANKED_CONTENT_HASH_ATTRS = tuple(_content_hash_attrs)
+RANKED_CHAIN_ATTRS = tuple(key.replace("hash", "chain") for key in
+    RANKED_HASH_ATTRS)
+RANKED_CHAIN_CHASH_ATTRS = tuple(key.replace("hash", "chain.chashes") for key in
+    RANKED_HASH_ATTRS)
+
+
+# We keep reverse-order lists for all of the hash attributes we know about
+# because hash retrieval from the repository is always done using the least
+# preferred hash, allowing for backwards compatibility with existing clients.
+# Rather than compute the reverse-list every time we call
+# get_least_preferred_hash(..) we compute them here.
+REVERSE_RANKED_HASH_ATTRS = RANKED_HASH_ATTRS[::-1]
+REVERSE_RANKED_CHASH_ATTRS = RANKED_CHASH_ATTRS[::-1]
+REVERSE_RANKED_CONTENT_HASH_ATTRS = RANKED_CONTENT_HASH_ATTRS[::-1]
+REVERSE_RANKED_CHAIN_ATTRS = RANKED_CHAIN_ATTRS[::-1]
+REVERSE_RANKED_CHAIN_CHASH_ATTRS = RANKED_CHAIN_CHASH_ATTRS[::-1]
+
+
+def _get_hash_dics(hash_type, reverse=False):
+        """Based on the 'hash_type', return a tuple describing the ranking of
+        hash attributes from "most preferred" to "least preferred" and a
+        mapping of those attributes to the hash algorithms that are used to
+        compute those attributes.
+
+        If 'reverse' is true, return the rank_tuple in reverse order, from least
+        preferred hash to most preferred hash.
+        """
+
+        if hash_type == HASH:
+                if reverse:
+                        rank_tuple = REVERSE_RANKED_HASH_ATTRS
+                else:
+                        rank_tuple = RANKED_HASH_ATTRS
+                hash_dic = HASH_ALGS
+        elif hash_type == CHASH:
+                if reverse:
+                        rank_tuple = REVERSE_RANKED_CHASH_ATTRS
+                else:
+                        rank_tuple = RANKED_CHASH_ATTRS
+                hash_dic = CHASH_ALGS
+        elif hash_type == CONTENT_HASH:
+                if reverse:
+                        rank_tuple = REVERSE_RANKED_CONTENT_HASH_ATTRS
+                else:
+                        rank_tuple = RANKED_CONTENT_HASH_ATTRS
+                hash_dic = CONTENT_HASH_ALGS
+        elif hash_type == CHAIN:
+                if reverse:
+                        rank_tuple = REVERSE_RANKED_CHAIN_ATTRS
+                else:
+                        rank_tuple = RANKED_CHAIN_ATTRS
+                hash_dic = CHAIN_ALGS
+        elif hash_type == CHAIN_CHASH:
+                if reverse:
+                        rank_tuple = REVERSE_RANKED_CHAIN_CHASH_ATTRS
+                else:
+                        rank_tuple = RANKED_CHAIN_CHASH_ATTRS
+                hash_dic = CHAIN_CHASH_ALGS
+        else:
+                rank_tuple = None
+                hash_dic = None
+
+        return rank_tuple, hash_dic
+
+
+def get_preferred_hash(action, hash_type=HASH):
+        """Returns a tuple of the form (hash_attr, hash_val, hash_func)
+        where 'hash_attr' is the preferred hash attribute name, 'hash_val'
+        is the preferred hash value, and 'hash_func' is the function
+        used to compute the preferred hash based on the available
+        pkg.*hash.* attributes declared in the action."""
+
+        rank_tuple, hash_dic = _get_hash_dics(hash_type)
+        if not (rank_tuple and hash_dic):
+                raise ValueError("Unknown hash_type %s passed to "
+                    "get_preferred_hash" % hash_type)
+
+        for hash_attr_name in rank_tuple:
+                if hash_attr_name in action.attrs:
+                        return hash_attr_name, action.attrs[hash_attr_name], \
+                            hash_dic[hash_attr_name]
+
+        # fall back to the default hash member since it's not in action.attrs
+        if hash_type == HASH:
+                return None, action.hash, hashlib.sha1
+        # an action can legitimately have no chash
+        if hash_type == CHASH:
+                return None, None, DEFAULT_HASH_FUNC
+        # an action can legitimately have no content-hash if it's not a file
+        # type we know about
+        if hash_type == CONTENT_HASH:
+                return None, None, None
+        # an action can legitimately have no chain
+        if hash_type == CHAIN:
+                return None, None, None
+        # an action can legitimately have no chain_chash
+        if hash_type == CHAIN_CHASH:
+                return None, None, None
+
+        # This should never happen.
+        raise Exception("Error determining the preferred hash for %s %s" %
+            (action, hash_type))
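+
+# A sketch of intended usage (hypothetical action 'act' carrying a legacy
+# .hash member plus a "pkg.hash.sha256" attribute):
+#
+#     get_preferred_hash(act)        # ("pkg.hash.sha256", <val>, hashlib.sha256)
+#     get_least_preferred_hash(act)  # (None, act.hash, hashlib.sha1)
+#
+# reflecting that verification prefers the strongest available hash while
+# retrieval remains compatible with the oldest supported one.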
+
+
+def get_least_preferred_hash(action, hash_type=HASH):
+        """Returns a tuple of the least preferred hash attribute name, the hash
+        value that should result when we compute the hash, and the function used
+        to compute the hash based on the available hash and pkg.*hash.*
+        attributes declared in the action."""
+
+        # fall back to the default hash member since it's not in action.attrs
+        if hash_type == HASH:
+                if not action:
+                        return "hash", None, hashlib.sha1
+
+                # This is nearly always true, except when we're running the
+                # test suite and have intentionally disabled SHA-1 hashes.
+                if "hash" in DEFAULT_HASH_ATTRS:
+                        return None, action.hash, hashlib.sha1
+
+        rank_list, hash_dic = _get_hash_dics(hash_type, reverse=True)
+        if not (rank_list and hash_dic):
+                raise ValueError("Unknown hash_type %s passed to "
+                    "get_preferred_hash" % hash_type)
+
+        if not action:
+                return rank_list[0], None, hash_dic[rank_list[0]]
+
+        for hash_attr_name in rank_list:
+                if hash_attr_name in action.attrs:
+                        return hash_attr_name, action.attrs[hash_attr_name], \
+                            hash_dic[hash_attr_name]
+
+        # an action can legitimately have no chash
+        if hash_type == CHASH:
+                return None, None, DEFAULT_HASH_FUNC
+        # an action can legitimately have no content-hash if it's not a file
+        # type we know about
+        if hash_type == CONTENT_HASH:
+                return None, None, None
+        # an action can legitimately have no chain
+        if hash_type == CHAIN:
+                return None, None, None
+
+        # This should never happen.
+        raise Exception("Error determining the least preferred hash for %s %s" %
+            (action, hash_type))
+
+
+def get_common_preferred_hash(action, old_action, hash_type=HASH):
+        """Returns the best common hash attribute of those shared between a new
+        action and an installed (old) version of that action. We return the
+        name of the common attribute, the new and original values of that
+        attribute, and the function used to compute the hash.
+
+        If no common attribute is found, we fall back to the legacy <Action>.hash
+        member assuming it is not None for the new and orig actions, and specify
+        hashlib.sha1 as the algorithm. If no 'hash' member is set, we return
+        a tuple of None objects."""
+
+        if not old_action:
+                return None, None, None, None
+
+        rank_list, hash_dic = _get_hash_dics(hash_type)
+        if not (rank_list and hash_dic):
+                raise ValueError("Unknown hash_type %s passed to "
+                    "get_preferred_common_hash" % hash_type)
+
+        common_attrs = set(
+            action.attrs.keys()).intersection(set(old_action.attrs.keys()))
+        for hash_attr_name in rank_list:
+                if hash_attr_name in common_attrs:
+                        return hash_attr_name, action.attrs[hash_attr_name], \
+                            old_action.attrs[hash_attr_name], \
+                            hash_dic[hash_attr_name]
+
+        if action.hash and old_action.hash:
+                return None, action.hash, old_action.hash, hashlib.sha1
+        return None, None, None, None
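+
+# For instance (hypothetical values): if the new action carries
+# pkg.hash.sha256=NEW and the old action carries pkg.hash.sha256=OLD,
+# get_common_preferred_hash(new, old) returns
+# ("pkg.hash.sha256", NEW, OLD, hashlib.sha256); if the two actions share
+# only the legacy .hash member, it returns (None, new.hash, old.hash,
+# hashlib.sha1).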
--- a/src/modules/elf.c	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/elf.c	Fri Oct 11 16:09:34 2013 +0100
@@ -20,8 +20,7 @@
  */
 
 /*
- * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
- * Use is subject to license terms.
+ *  Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
  */
 
 #include <sys/stat.h>
@@ -38,6 +37,23 @@
 
 #include <Python.h>
 
+/*
+ * When getting information about ELF files, sometimes we want to decide
+ * which types of hash we want to calculate. This structure is used to
+ * return the results of parsing Python method arguments.
+ *
+ * 'fd'      the file descriptor of an ELF file
+ * 'sha1'    an integer > 0 if we should calculate an SHA-1 hash
+ * 'sha256'  an integer > 0 if we should calculate an SHA-2 256 hash
+ *
+ */
+typedef struct
+{
+    int fd;
+    int sha1;
+    int sha256;
+} dargs_t;
+
 static int
 pythonify_ver_liblist_cb(libnode_t *n, void *info, void *info2)
 {
@@ -114,6 +130,42 @@
 	return (fd);
 }
 
+static dargs_t
+py_get_dyn_args(PyObject *args, PyObject *kwargs)
+{
+	int fd = -1;
+	char *f;
+        int get_sha1 = 1;
+        int get_sha256 = 0;
+
+        dargs_t dargs;
+        dargs.fd = -1;
+        /*
+         * By default, we always get an SHA-1 hash, and never get an SHA-2
+         * hash.
+         */
+        dargs.sha1 = 1;
+        dargs.sha256 = 0;
+
+        static char *kwlist[] = {"fd", "sha1", "sha256", NULL};
+
+	if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s|ii", kwlist, &f,
+            &get_sha1, &get_sha256)) {
+		PyErr_SetString(PyExc_ValueError, "could not parse argument");
+		return (dargs);
+	}
+
+	if ((fd = open(f, O_RDONLY)) < 0) {
+		PyErr_SetFromErrnoWithFilename(PyExc_OSError, f);
+		return (dargs);
+	}
+
+        dargs.fd = fd;
+        dargs.sha1 = get_sha1;
+        dargs.sha256 = get_sha256;
+	return (dargs);
+}
+
 /*
  * For ELF operations: Need to check if a file is an ELF object.
  */
@@ -185,7 +237,7 @@
 
 /*
  * Returns a dictionary with the relevant information.  No longer
- * accurately titled "get_dynamic," as it returns the hash as well.
+ * accurately titled "get_dynamic," as it can return hashes as well.
  *
  * The hash is currently of the following sections (when present):
  * 		.text .data .data1 .rodata .rodata1
@@ -197,11 +249,16 @@
  *	defs: ["version", ... ],
  *	deps: [["file", ["versionlist"]], ...],
  * 	hash: "sha1hash"
+ *      pkg.elf.sha256: "sha2hash"
  * }
  *
  * If any item is empty or has no value, it is omitted from the
  * dictionary.
  *
+ * The keyword arguments "sha1" and "sha256" are allowed, which
+ * take Python booleans, declaring which hashes should be
+ * computed on the input file.
+ *
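+ * For example, from Python (hypothetical file path):
+ *
+ *     import pkg.elf as elf
+ *     d = elf.get_dynamic("/usr/bin/ls", sha1=True, sha256=True)
+ *     d.get("hash"), d.get("pkg.content-hash.sha256")
+ *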
  * XXX: Currently, defs contains some duplicate entries.  There
  * may be meaning attached to this, or it may just be something
  * worth trimming out at this stage or above.
@@ -209,20 +266,23 @@
  */
 /*ARGSUSED*/
 static PyObject *
-get_dynamic(PyObject *self, PyObject *args)
+get_dynamic(PyObject *self, PyObject *args, PyObject *keywords)
 {
-	int 	fd, i;
+	int 	i;
+        dargs_t         dargs;
 	dyninfo_t 	*dyn = NULL;
 	PyObject	*pdep = NULL;
 	PyObject	*pdef = NULL;
 	PyObject	*pdict = NULL;
 	char		hexhash[41];
+        char            hexsha256[65];
 	char		hexchars[17] = "0123456789abcdef";
 
-	if ((fd = py_get_fd(args)) < 0)
+	dargs = py_get_dyn_args(args, keywords);
+        if (dargs.fd < 0)
 		return (NULL);
 
-	if ((dyn = getdynamic(fd)) == NULL)
+	if ((dyn = getdynamic(dargs.fd, dargs.sha1, dargs.sha256)) == NULL)
 		goto out;
 
 	pdict = PyDict_New();
@@ -259,13 +319,25 @@
 		PyDict_SetItemString(pdict, "runpath", Py_BuildValue("s", str));
 	}
 
-	for (i = 0; i < 20; i++) {
-		hexhash[2 * i] = hexchars[(dyn->hash[i] & 0xf0) >> 4];
-		hexhash[2 * i + 1] = hexchars[dyn->hash[i] & 0x0f];
-	}
-	hexhash[40] = '\0';
+        if (dargs.sha1 > 0) {
+                for (i = 0; i < 20; i++) {
+                        hexhash[2 * i] = hexchars[(dyn->hash[i] & 0xf0) >> 4];
+                        hexhash[2 * i + 1] = hexchars[dyn->hash[i] & 0x0f];
+                }
+                hexhash[40] = '\0';
+        	PyDict_SetItemString(pdict, "hash", Py_BuildValue("s", hexhash));
+        }
 
-	PyDict_SetItemString(pdict, "hash", Py_BuildValue("s", hexhash));
+        if (dargs.sha256 > 0) {
+                for (i = 0; i < 32; i++) {
+                        hexsha256[2 * i] = \
+                            hexchars[(dyn->hash256[i] & 0xf0) >> 4];
+                        hexsha256[2 * i + 1] = hexchars[dyn->hash256[i] & 0x0f];
+                }
+                hexsha256[64] = '\0';
+                PyDict_SetItemString(pdict, "pkg.content-type.sha256",
+                    Py_BuildValue("s", hexsha256));
+        }
 	goto out;
 
 err:
@@ -275,16 +347,17 @@
 
 out:
 	if (dyn != NULL)
-		dyninfo_free(dyn);
+            dyninfo_free(dyn);
 
-	(void) close(fd);
+	(void) close(dargs.fd);
 	return (pdict);
 }
 
 static PyMethodDef methods[] = {
 	{ "is_elf_object", elf_is_elf_object, METH_VARARGS },
 	{ "get_info", get_info, METH_VARARGS },
-	{ "get_dynamic", get_dynamic, METH_VARARGS },
+	{ "get_dynamic", (PyCFunction)get_dynamic,
+        METH_VARARGS | METH_KEYWORDS},
 	{ NULL, NULL }
 };
 
--- a/src/modules/elfextract.c	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/elfextract.c	Fri Oct 11 16:09:34 2013 +0100
@@ -20,8 +20,7 @@
  */
 
 /*
- * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
- * Use is subject to license terms.
+ *  Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
  */
 
 #include <libelf.h>
@@ -39,8 +38,9 @@
 #include <netinet/in.h>
 #include <inttypes.h>
 #if defined(__SVR4) && defined(__sun)
-/* Solaris has a built-in SHA-1 library interface */
+/* Solaris has built-in SHA-1 and SHA-2 library interfaces */
 #include <sha1.h>
+#include <sha2.h>
 #else
 /*
  * All others can use OpenSSL, but OpenSSL's method signatures
@@ -293,7 +293,8 @@
  * Reads a section in 64k increments, adding it to the hash.
  */
 static int
-readhash(int fd, SHA1_CTX *shc, off_t offset, off_t size)
+readhash(int fd, SHA1_CTX *shc, SHA256_CTX *shc2, off_t offset, off_t size,
+    int sha1, int sha256)
 {
 	off_t n;
 	char hashbuf[64 * 1024];
@@ -313,7 +314,12 @@
 			PyErr_SetFromErrno(PyExc_IOError);
 			return (-1);
 		}
-		SHA1Update(shc, hashbuf, rbytes);
+                if (sha1 > 0) {
+		        SHA1Update(shc, hashbuf, rbytes);
+                }
+                if (sha256 > 0) {
+                        SHA256Update(shc2, hashbuf, rbytes);
+                }
 		size -= rbytes;
 	} while (size != 0);
 
@@ -325,9 +331,11 @@
  * information we want from an ELF file.  Returns NULL
  * if it can't find everything (eg. not ELF file, wrong
  * class of ELF file).
+ * If sha1 is > 0, we compute an SHA-1 hash as part of the returned dyninfo_t.
+ * If sha256 is > 0, we also compute an SHA-2 256 hash.
  */
 dyninfo_t *
-getdynamic(int fd)
+getdynamic(int fd, int sha1, int sha256)
 {
 	Elf		*elf = NULL;
 	Elf_Scn		*scn = NULL;
@@ -342,6 +350,7 @@
 	int		t = 0, num_dyn = 0, dynstr = -1;
 
 	SHA1_CTX	shc;
+        SHA256_CTX      shc2;
 	dyninfo_t	*dyn = NULL;
 
 	liblist_t	*deps = NULL;
@@ -374,7 +383,12 @@
 	}
 
 	/* get useful sections */
-	SHA1Init(&shc);
+	if (sha1 > 0) {
+		SHA1Init(&shc);
+	}
+	if (sha256 > 0) {
+		SHA256Init(&shc2);
+	}
 	while ((scn = elf_nextscn(elf, scn))) {
 		if (gelf_getshdr(scn, &shdr) != &shdr) {
 			PyErr_SetString(ElfError, elf_errmsg(-1));
@@ -386,7 +400,7 @@
 			goto bad;
 		}
 
-		if (hashsection(name)) {
+		if (hashsection(name) && (sha1 > 0 || sha256 > 0)) {
 			if (shdr.sh_type == SHT_NOBITS) {
 				/*
 				 * We can't just push shdr.sh_size into
@@ -398,12 +412,18 @@
 				uint64_t mask = 0xffffffff00000000ULL;
 				uint32_t top = htonl((uint32_t)((n & mask) >> 32));
 				uint32_t bot = htonl((uint32_t)n);
-				SHA1Update(&shc, &top, sizeof (top));
-				SHA1Update(&shc, &bot, sizeof (bot));
+				if (sha1 > 0) {
+					SHA1Update(&shc, &top, sizeof (top));
+					SHA1Update(&shc, &bot, sizeof (bot));
+				}
+				if (sha256 > 0) {
+					SHA256Update(&shc2, &top, sizeof (top));
+					SHA256Update(&shc2, &bot, sizeof (bot));
+				}
 			} else {
 				int hash;
-				hash = readhash(fd, &shc, shdr.sh_offset,
-				    shdr.sh_size);
+				hash = readhash(fd, &shc, &shc2, shdr.sh_offset,
+				    shdr.sh_size, sha1, sha256);
 
 				if (hash == -1)
 					goto bad;
@@ -584,8 +604,12 @@
 	dyn->deps = deps;
 	dyn->def = def;
 	dyn->vers = verdef;
-	SHA1Final(dyn->hash, &shc);
-
+	if (sha1 > 0) {
+		SHA1Final(dyn->hash, &shc);
+	}
+	if (sha256 > 0) {
+		SHA256Final(dyn->hash256, &shc2);
+	}
 	return (dyn);
 
 bad:
--- a/src/modules/elfextract.h	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/elfextract.h	Fri Oct 11 16:09:34 2013 +0100
@@ -20,8 +20,7 @@
  */
 
 /*
- * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
- * Use is subject to license terms.
+ * Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
  */
 
 #ifndef _ELFEXTRACT_H
@@ -48,6 +47,8 @@
 	liblist_t 	*vers;		/* version provided list (also	   */
 					/* 	contains offsets)	   */
 	unsigned char	hash[20];	/* SHA1 Hash of significant segs.  */
+	unsigned char	hash256[32];	/* SHA2 Hash of significant segs.  */
+
 	Elf		*elf;		/* elf data -- must be freed	   */
 } dyninfo_t;
 
@@ -61,7 +62,7 @@
 
 extern int iself(int fd);
 extern int iself32(int fd);
-extern dyninfo_t *getdynamic(int fd);
+extern dyninfo_t *getdynamic(int fd, int sha1, int sha256);
 extern void dyninfo_free(dyninfo_t *dyn);
 extern hdrinfo_t *getheaderinfo(int fd);
 
--- a/src/modules/flavor/base.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/flavor/base.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import os
@@ -417,9 +417,8 @@
         try:
                 new_paths = run_paths
                 index = run_paths.index(PD_DEFAULT_RUNPATH)
-                if index >= 0:
-                        new_paths = run_paths[:index] + \
-                            default_runpath + run_paths[index + 1:]
+                new_paths = run_paths[:index] + \
+                    default_runpath + run_paths[index + 1:]
                 if PD_DEFAULT_RUNPATH in new_paths:
                         raise MultipleDefaultRunpaths()
                 return new_paths
--- a/src/modules/flavor/depthlimitedmf.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/flavor/depthlimitedmf.py	Fri Oct 11 16:09:34 2013 +0100
@@ -2,19 +2,37 @@
 # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Python
 # Software Foundation; All Rights Reserved
 #
-# Copyright (c) 2010, 2011, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
 
 
-"""A version of ModuleFinder which limits the depth of exploration for loaded
-modules and discovers where a module might be loaded instead of determining
-which path contains a module to be loaded."""
+"""A standalone version of ModuleFinder which limits the depth of exploration
+for loaded modules and discovers where a module might be loaded instead of
+determining which path contains a module to be loaded.  It is designed to be
+run by any version of python against its set of modules.  To communicate its
+results to the process which ran it, it prints output to stdout.  The format is
+to start a line with 'DEP ' if it contains information about a dependency, and
+'ERR ' if it contains information about a module it couldn't analyze."""
+
+# This module cannot import other pkg modules because pkg modules are not
+# delivered for all versions of python.  Because of this, we have to duplicate
+# code in a couple of places, and we also have to be careful to use the pkg
+# modules when not running standalone.
+#
+# We also have to be careful to make the code in this module compliant with both
+# Python 2 and Python 3 syntax.
+
+if __name__ != "__main__":
+        import pkg.flavor.base as base
 
 import modulefinder
 import os
-import pkg.flavor.base as base
 import sys
 
-from pkg.portable import PD_DEFAULT_RUNPATH
+# A string used as a component of the pkg.depend.runpath value as a special
+# token to determine where to insert the runpath that pkgdepend generates itself
+# (duplicated from pkg.portable.__init__ for reasons above)
+PD_DEFAULT_RUNPATH = "$PKGDEPEND_RUNPATH"
+
 python_path = "PYTHONPATH"
 
 class ModuleInfo(object):
@@ -61,6 +79,23 @@
                 return "name:%s suffixes:%s dirs:%s" % (self.name,
                     " ".join(self.suffixes), len(self.dirs))
 
+
+if __name__ == "__main__":
+        class MultipleDefaultRunPaths(Exception):
+
+                def __unicode__(self):
+                        # To workaround python issues 6108 and 2517, this
+                        # provides a a standard wrapper for this class'
+                        # exceptions so that they have a chance of being
+                        # stringified correctly.
+                        return str(self)
+
+                def __str__(self):
+                        return _(
+                            "More than one $PKGDEPEND_RUNPATH token was set on "
+                            "the same action in this manifest.")
+
+
 class DepthLimitedModuleFinder(modulefinder.ModuleFinder):
 
         def __init__(self, install_dir, *args, **kwargs):
@@ -91,10 +126,27 @@
                 new_path.append(install_dir)
 
                 if run_paths:
-                        # add our detected runpath into the user-supplied one
-                        # (if any)
-                        new_path = base.insert_default_runpath(new_path,
-                            run_paths)
+                        if __name__ != "__main__":
+                                # add our detected runpath into the
+                                # user-supplied one (if any)
+                                new_path = base.insert_default_runpath(new_path,
+                                    run_paths)
+                        else:
+                                # This duplicates base.insert_default_runpath:
+                                # insert our default search path where the
+                                # PD_DEFAULT_RUNPATH token was found.
+                                try:
+                                        index = run_paths.index(
+                                            PD_DEFAULT_RUNPATH)
+                                        run_paths = run_paths[:index] + \
+                                            new_path + run_paths[index + 1:]
+                                        if PD_DEFAULT_RUNPATH in run_paths:
+                                                raise MultipleDefaultRunPaths()
+                                except ValueError:
+                                        # no PD_DEFAULT_RUNPATH token, so we
+                                        # override the whole default search path
+                                        pass
+                                new_path = run_paths
 
                 modulefinder.ModuleFinder.__init__(self, path=new_path,
                     *args, **kwargs)
@@ -130,7 +182,7 @@
                         m.__code__ = co
                         try:
                                 res.extend(self.scan_code(co, m))
-                        except ImportError, msg:
+                        except ImportError as msg:
                                 self.msg(2, "ImportError:", str(msg), fqname,
                                     pathname)
                                 self._add_badmodule(fqname, m)
@@ -196,7 +248,7 @@
                         return []
                 try:
                         res.extend(self.import_hook(name, caller, level=level))
-                except ImportError, msg:
+                except ImportError as msg:
                         self.msg(2, "ImportError:", str(msg))
                         self._add_badmodule(name, caller)
                 else:
@@ -312,3 +364,25 @@
 
                 self.msgout(4, "load_tail ->", q)
                 return res
+
+
+if __name__ == "__main__":
+        """Usage:
+              depthlimitedmf.py <install_path> <script>
+                  [ run_path run_path ... ]
+        """
+        run_paths = sys.argv[3:]
+        try:
+                mf = DepthLimitedModuleFinder(sys.argv[1], run_paths=run_paths)
+                loaded_modules = mf.run_script(sys.argv[2])
+                for res in set([
+                    (tuple(m.get_file_names()), tuple(m.dirs))
+                    for m in loaded_modules
+                ]):
+                        sys.stdout.write("DEP %s\n" % (res,))
+                missing, maybe = mf.any_missing_maybe()
+                sys.stdout.writelines(("ERR %s\n" % name for name in missing))
+        except ValueError as e:
+                sys.stdout.write("ERR %s\n" % e)
+        except MultipleDefaultRunPaths as e:
+                sys.stdout.write("%s\n" % e)
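
The standalone module communicates over stdout only, so the caller forks
whichever interpreter matches the script under analysis and parses the line
protocol.  A rough sketch (the interpreter version and paths here are
hypothetical; pkg.flavor.python builds the real command line):

        import subprocess

        cmd = ["python2.6", "depthlimitedmf.py",
            "/usr/lib/python2.6/vendor-packages", "/tmp/script.py"]
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        out, unused_err = proc.communicate()
        for line in out.splitlines():
                if line.startswith("DEP "):
                        print("dependency info: %s" % line[4:])
                elif line.startswith("ERR "):
                        print("unanalyzable module: %s" % line[4:])
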
--- a/src/modules/flavor/depthlimitedmf27.py	Wed Oct 02 09:22:55 2013 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,372 +0,0 @@
-#!/usr/bin/python
-# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Python
-# Software Foundation; All Rights Reserved
-#
-# Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
-
-
-"""A standalone version of ModuleFinder which limits the depth of exploration
-for loaded modules and discovers where a module might be loaded instead of
-determining which path contains a module to be loaded.  It is designed to be run
-by python2.7 against 2.7 modules.  To communicate its results to the process
-which ran it, it prints output to stdout.  The format is to start a line with
-'DEP ' if it contains information about a dependency, and 'ERR ' if it contains
-information about a module it couldn't analyze."""
-
-# This module cannot import other pkg modules because pkg modules are not
-# delivered for python 2.7.
-
-import modulefinder
-import os
-import sys
-
-# A string used as a component of the pkg.depend.runpath value as a special
-# token to determine where to insert the runpath that pkgdepend generates itself
-# (duplicated from pkg.portable.__init__ for reasons above)
-PD_DEFAULT_RUNPATH = "$PKGDEPEND_RUNPATH"
-
-python_path = "PYTHONPATH"
-
-class ModuleInfo(object):
-        """This class contains information about from where a python module
-        might be loaded."""
-
-        def __init__(self, name, dirs, builtin=False):
-                """Build a ModuleInfo object.
-
-                The 'name' parameter is the name of the module.
-
-                The 'dirs' parameter is the list of directories where the module
-                might be found.
-
-                The 'builtin' parameter sets whether the module is a python
-                builtin (like sys)."""
-
-                self.name = name
-                self.builtin = builtin
-                self.suffixes = [".py", ".pyc", ".pyo", "/__init__.py", ".so",
-                    "module.so"]
-                self.dirs = sorted(dirs)
-
-        def make_package(self):
-                """Declare that this module is a package."""
-
-                if self.dirs:
-                        self.suffixes = ["/__init__.py"]
-                else:
-                        self.suffixes = []
-
-        def get_package_dirs(self):
-                """Get the directories where this package might be defined."""
-
-                return [os.path.join(p, self.name) for p in self.dirs]
-
-        def get_file_names(self):
-                """Return all the file names under which this module might be
-                found."""
-
-                return ["%s%s" % (self.name, suf) for suf in self.suffixes]
-
-        def __str__(self):
-                return "name:%s suffixes:%s dirs:%s" % (self.name,
-                    " ".join(self.suffixes), len(self.dirs))
-
-
-class MultipleDefaultRunPaths(Exception):
-
-        def __unicode__(self):
-                # To workaround python issues 6108 and 2517, this provides a
-                # a standard wrapper for this class' exceptions so that they
-                # have a chance of being stringified correctly.
-                return str(self)
-
-        def __str__(self):
-                return _(
-                    "More than one $PKGDEPEND_RUNPATH token was set on the "
-                    "same action in this manifest.")
-
-
-class DepthLimitedModuleFinder(modulefinder.ModuleFinder):
-
-        def __init__(self, install_dir, *args, **kwargs):
-                """Produce a module finder that ignores PYTHONPATH and only
-                reports the direct imports of a module.
-
-                run_paths as a keyword argument specifies a list of additional
-                paths to use when searching for modules."""
-
-                # ModuleFinder.__init__ doesn't expect run_paths
-                run_paths = kwargs.pop("run_paths", [])
-
-                # Check to see whether a python path has been set.
-                if python_path in os.environ:
-                        py_path = [
-                            os.path.normpath(fp)
-                            for fp in os.environ[python_path].split(os.pathsep)
-                        ]
-                else:
-                        py_path = []
-
-                # Remove any paths that start with the defined python paths.
-                new_path = [
-                    fp
-                    for fp in sys.path[1:]
-                    if not self.startswith_path(fp, py_path)
-                ]
-                new_path.append(install_dir)
-
-                if run_paths:
-                        # insert our default search path where the
-                        # PD_DEFAULT_RUNPATH token was found
-                        try:
-                                index = run_paths.index(PD_DEFAULT_RUNPATH)
-                                if index >= 0:
-                                        run_paths = run_paths[:index] + \
-                                            new_path + run_paths[index + 1:]
-                                if PD_DEFAULT_RUNPATH in run_paths:
-                                        raise MultipleDefaultRunPaths()
-                        except ValueError:
-                                # no PD_DEFAULT_PATH token, so we override the
-                                # whole default search path
-                                pass
-                        new_path = run_paths
-
-                modulefinder.ModuleFinder.__init__(self, path=new_path,
-                    *args, **kwargs)
-
-        @staticmethod
-        def startswith_path(path, lst):
-                for l in lst:
-                        if path.startswith(l):
-                                return True
-                return False
-
-        def run_script(self, pathname):
-                """Find all the modules the module at pathname directly
-                imports."""
-
-                fp = open(pathname, "r")
-                return self.load_module('__main__', fp, pathname)
-
-        def load_module(self, fqname, fp, pathname):
-                """This code has been slightly modified from the function of
-                the parent class. Specifically, it checks the current depth
-                of the loading and halts if it exceeds the depth that was given
-                to run_script."""
-
-                self.msgin(2, "load_module", fqname, fp and "fp", pathname)
-                co = compile(fp.read()+'\n', pathname, 'exec')
-                m = self.add_module(fqname)
-                m.__file__ = pathname
-                res = []
-                if co:
-                        if self.replace_paths:
-                                co = self.replace_paths_in_code(co)
-                        m.__code__ = co
-                        try:
-                                res.extend(self.scan_code(co, m))
-                        except ImportError, msg:
-                                self.msg(2, "ImportError:", str(msg), fqname,
-                                    pathname)
-                                self._add_badmodule(fqname, m)
-
-                self.msgout(2, "load_module ->", m)
-                return res
-
-        def scan_code(self, co, m):
-                """Scan the code looking for import statements."""
-
-                res = []
-                code = co.co_code
-                if sys.version_info >= (2, 5):
-                        scanner = self.scan_opcodes_25
-                else:
-                        scanner = self.scan_opcodes
-                for what, args in scanner(co):
-                        if what == "store":
-                                name, = args
-                                m.globalnames[name] = 1
-                        elif what in ("import", "absolute_import"):
-                                fromlist, name = args
-                                have_star = 0
-                                if fromlist is not None:
-                                        if "*" in fromlist:
-                                                have_star = 1
-                                        fromlist = [
-                                            f for f in fromlist if f != "*"
-                                        ]
-                                if what == "absolute_import": level = 0
-                                else: level = -1
-                                res.extend(self._safe_import_hook(name, m,
-                                    fromlist, level=level))
-                        elif what == "relative_import":
-                                level, fromlist, name = args
-                                if name:
-                                        res.extend(self._safe_import_hook(name,
-                                            m, fromlist, level=level))
-                                else:
-                                        parent = self.determine_parent(m,
-                                            level=level)
-                                        res.extend(self._safe_import_hook(
-                                            parent.__name__, None, fromlist,
-                                            level=0))
-                        else:
-                                # We don't expect anything else from the
-                                # generator.
-                                raise RuntimeError(what)
-
-                for c in co.co_consts:
-                        if isinstance(c, type(co)):
-                                res.extend(self.scan_code(c, m))
-                return res
-
-
-        def _safe_import_hook(self, name, caller, fromlist, level=-1):
-                """Wrapper for self.import_hook() that won't raise ImportError.
-                """
-
-                res = []
-                if name in self.badmodules:
-                        self._add_badmodule(name, caller)
-                        return []
-                try:
-                        res.extend(self.import_hook(name, caller, level=level))
-                except ImportError, msg:
-                        self.msg(2, "ImportError:", str(msg))
-                        self._add_badmodule(name, caller)
-                else:
-                        if fromlist:
-                                for sub in fromlist:
-                                        if sub in self.badmodules:
-                                                self._add_badmodule(sub, caller)
-                                                continue
-                                        res.extend(self.import_hook(name,
-                                            caller, [sub], level=level))
-                return res
-
-        def import_hook(self, name, caller=None, fromlist=None, level=-1):
-                """Find all the modules that importing name will import."""
-
-                # Special handling for os.path is needed because the os module
-                # manipulates sys.modules directly to provide both os and
-                # os.path.
-                if name == "os.path":
-                        self.msg(2, "bypassing os.path import - importing os "
-                            "instead", name, caller, fromlist, level)
-                        name = "os"
-
-                self.msg(3, "import_hook", name, caller, fromlist, level)
-                parent = self.determine_parent(caller, level=level)
-                q, tail = self.find_head_package(parent, name)
-                if not tail:
-                        # If q is a builtin module, don't report it because it
-                        # doesn't live in the normal module space and it's part
-                        # of python itself, which is handled by a different
-                        # kind of dependency.
-                        if isinstance(q, ModuleInfo) and q.builtin:
-                                return []
-                        elif isinstance(q, modulefinder.Module):
-                                name = q.__name__
-                                path = q.__path__
-                                # some Module objects don't get a path
-                                if not path:
-                                        if name in sys.builtin_module_names or \
-                                            name == "__future__":
-                                                return [ModuleInfo(name, [],
-                                                    builtin=True)]
-                                        else:
-                                                return [ModuleInfo(name, [])]
-                                return [ModuleInfo(name, path)]
-                        else:
-                                return [q]
-                res = self.load_tail(q, tail)
-                q.make_package()
-                res.append(q)
-                return res
-
-        def import_module(self, partname, fqname, parent):
-                """Find where this module lives relative to its parent."""
-
-                parent_dirs = None
-                self.msgin(3, "import_module", partname, fqname, parent)
-                try:
-                        m = self.modules[fqname]
-                except KeyError:
-                        pass
-                else:
-                        self.msgout(3, "import_module ->", m)
-                        return m
-                if fqname in self.badmodules:
-                        self.msgout(3, "import_module -> None")
-                        return None
-                if parent:
-                        if not parent.dirs:
-                                self.msgout(3, "import_module -> None")
-                                return None
-                        else:
-                                parent_dirs = parent.get_package_dirs()
-                try:
-                        mod = self.find_module(partname, parent_dirs, parent)
-                except ImportError:
-                        self.msgout(3, "import_module ->", None)
-                        return None
-                return mod
-
-        def find_module(self, name, path, parent=None):
-                """Calculate the potential paths on the file system where the
-                module could be found."""
-
-                if not path:
-                    if name in sys.builtin_module_names or name == "__future__":
-                            return ModuleInfo(name, [], builtin=True)
-                    path = self.path
-                return ModuleInfo(name, path)
-
-        def load_tail(self, q, tail):
-                """Determine where each component of a multilevel import would
-                be found on the file system."""
-
-                self.msgin(4, "load_tail", q, tail)
-                res = []
-                name = q.name
-                cur_parent = q
-                while tail:
-                        i = tail.find('.')
-                        if i < 0: i = len(tail)
-                        head, tail = tail[:i], tail[i+1:]
-                        new_name = "%s.%s" % (name, head)
-                        r = self.import_module(head, new_name, cur_parent)
-                        res.append(r)
-                        name = new_name
-                        cur_parent = r
-
-                # All but the last module found must be packages because they
-                # contained other packages.
-                for i in range(0, len(res) - 1):
-                        res[i].make_package()
-
-                self.msgout(4, "load_tail ->", q)
-                return res
-
-
-if __name__ == "__main__":
-        """Usage:
-              depthlimitedmf27.py <install_path> <script>
-                  [ run_path run_path ... ]
-        """
-        run_paths = sys.argv[3:]
-        try:
-                mf = DepthLimitedModuleFinder(sys.argv[1], run_paths=run_paths)
-                loaded_modules = mf.run_script(sys.argv[2])
-                for res in set([
-                    (tuple(m.get_file_names()), tuple(m.dirs))
-                    for m in loaded_modules
-                ]):
-                        print "DEP %s" % (res,)
-                missing, maybe =  mf.any_missing_maybe()
-                for name in missing:
-                        print "ERR %s" % name,
-        except ValueError, e:
-                print "ERR %s" % e
-        except MultipleDefaultRunPaths, e:
-                print e
--- a/src/modules/flavor/elf.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/flavor/elf.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2009, 2011, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import os
@@ -176,7 +176,7 @@
 
         try:
                 ei = elf.get_info(proto_file)
-                ed = elf.get_dynamic(proto_file)
+                ed = elf.get_dynamic(proto_file, sha1=False, sha256=False)
         except elf.ElfError, e:
                 raise BadElfFile(proto_file, e)
         deps = [
--- a/src/modules/flavor/python.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/flavor/python.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2009, 2011, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import os
@@ -278,8 +278,7 @@
         # version of python running, it's necessary to fork and run the
         # appropriate version of python.
         root_dir = os.path.dirname(__file__)
-        exec_file = os.path.join(root_dir,
-            "depthlimitedmf%s%s.py" % (analysis_major, analysis_minor))
+        exec_file = os.path.join(root_dir, "depthlimitedmf.py")
         cmd = ["python%s.%s" % (analysis_major, analysis_minor), exec_file,
             os.path.dirname(action.attrs["path"]), local_file]
 
--- a/src/modules/lint/pkglint_action.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/lint/pkglint_action.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2010, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 from pkg.lint.engine import lint_fmri_successor
@@ -672,7 +672,7 @@
                                             "across actions in %(fmris)s") %
                                             {"fmris": " ".join(plist),
                                             "path": p}, msgid=missing_id)
-                
+
                 if len(set([ac.name for ac, pfmri in action_types])) > 1:
                         plist = set([])
                         for ac, pfmri in action_types:
@@ -1507,3 +1507,23 @@
 
         version_incorporate.pkglint_desc = _("'incorporate' dependencies should"
             " have a version.")
+
+        def facet_value(self, action, manifest, engine, pkglint_id="012"):
+                """facet values should be set to a valid value in pkg(5)"""
+
+                for key in action.attrs.keys():
+                        if key.startswith("facet"):
+                                value = action.attrs[key].lower()
+                                if value not in ["true", "false", "all"]:
+                                        engine.warning(
+                                            _("facet value should be set to "
+                                            "'true', 'false' or 'all' in "
+                                            "attribute name %(key)s "
+                                            "in %(fmri)s") %
+                                            {"key": key,
+                                            "fmri": manifest.fmri},
+                                            msgid="%s%s" % (self.name,
+                                            pkglint_id))
+
+        facet_value.pkglint_desc = _("facet value should be set to "
+            "a valid value in an action attribute")
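
Outside the lint engine, the new facet_value check amounts to the following
(the sample attribute values are hypothetical):

        attrs = {"path": "usr/share/man/man1/ls.1", "facet.doc.man": "yes"}
        for key, value in attrs.items():
                if key.startswith("facet") and \
                    value.lower() not in ("true", "false", "all"):
                        # pkglint would emit a warning here; "yes" is not a
                        # documented facet value.
                        print("bad facet value: %s=%s" % (key, value))
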
--- a/src/modules/lint/pkglint_manifest.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/lint/pkglint_manifest.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2010, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 # Some pkg(5) specific lint manifest checks
@@ -305,6 +305,7 @@
                                 continue
 
                         if action.name == "file" and \
+                            action.attrs.get("pkg.filetype") == "elf" or \
                             "elfarch" in action.attrs:
                                 has_arch_file = True
 
--- a/src/modules/manifest.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/manifest.py	Fri Oct 11 16:09:34 2013 +0100
@@ -978,7 +978,8 @@
                         if signatures:
                                 # Generate manifest signature based upon
                                 # input content, but only if signatures
-                                # were requested.
+                                # were requested. In order to interoperate with
+                                # older clients, we must use sha-1 here.
                                 self.signatures = {
                                     "sha-1": self.hash_create(content)
                                 }
@@ -1206,6 +1207,8 @@
                 """This method takes a string representing the on-disk
                 manifest content, and returns a hash value."""
 
+                # This must be an SHA-1 hash in order to interoperate with
+                # older clients.
                 sha_1 = hashlib.sha1()
                 if isinstance(mfstcontent, unicode):
                         # Byte stream expected, so pass encoded.
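
The comments above pin the manifest signature algorithm for compatibility
with older clients; a minimal equivalent of the hash_create() computation
(the content is a made-up manifest fragment):

        import hashlib

        content = "set name=pkg.fmri value=pkg://test/foo@1.0\n"
        signatures = {"sha-1": hashlib.sha1(content).hexdigest()}
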
--- a/src/modules/misc.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/misc.py	Fri Oct 11 16:09:34 2013 +0100
@@ -34,7 +34,6 @@
 import errno
 import fnmatch
 import getopt
-import hashlib
 import itertools
 import locale
 import os
@@ -62,6 +61,7 @@
 
 import pkg.client.api_errors as api_errors
 import pkg.portable as portable
+import pkg.digest as digest
 
 from pkg import VERSION
 from pkg.client import global_settings
@@ -330,13 +330,22 @@
 
         return False
 
-def gunzip_from_stream(gz, outfile):
+def gunzip_from_stream(gz, outfile, hash_func=None, hash_funcs=None,
+    ignore_hash=False):
         """Decompress a gzipped input stream into an output stream.
 
         The argument 'gz' is an input stream of a gzipped file and 'outfile'
         is an output stream.  gunzip_from_stream() decompresses data from
-        'gz' and writes it to 'outfile', and returns the hexadecimal SHA-1 sum
-        of that data.
+        'gz' and writes it to 'outfile', and returns the hexadecimal SHA sum
+        of that data using the hash_func supplied.
+
+        'hash_funcs', if supplied, is a list of hash functions which we should
+        use to compute the hash. If 'hash_funcs' is supplied, a list of
+        hexadecimal digests computed using those functions is returned. The
+        returned list is in the same order as 'hash_funcs'.
+
+        If 'ignore_hash' is True, we do not compute a hash when decompressing
+        the content and do not return any value.
         """
 
         FHCRC = 2
@@ -344,6 +353,9 @@
         FNAME = 8
         FCOMMENT = 16
 
+        if not (hash_func or hash_funcs) and not ignore_hash:
+                raise ValueError("no hash functions for gunzip_from_stream")
+
         # Read the header
         magic = gz.read(2)
         if magic != "\037\213":
@@ -378,20 +390,46 @@
         if flag & FHCRC:
                 gz.read(2)
 
-        shasum = hashlib.sha1()
+        if ignore_hash:
+                pass
+        elif hash_funcs:
+                shasums = []
+                for f in hash_funcs:
+                        shasums.append(digest.HASH_ALGS[f]())
+        else:
+                shasum = hash_func()
         dcobj = zlib.decompressobj(-zlib.MAX_WBITS)
 
         while True:
                 buf = gz.read(64 * 1024)
                 if buf == "":
                         ubuf = dcobj.flush()
-                        shasum.update(ubuf) # pylint: disable=E1101
+                        if ignore_hash:
+                                pass
+                        elif hash_funcs:
+                                for sha in shasums:
+                                        sha.update(ubuf)
+                        else:
+                                shasum.update(ubuf) # pylint: disable=E1101
                         outfile.write(ubuf)
                         break
                 ubuf = dcobj.decompress(buf)
-                shasum.update(ubuf) # pylint: disable=E1101
+                if ignore_hash:
+                        pass
+                elif hash_funcs:
+                        for sha in shasums:
+                                sha.update(ubuf)
+                else:
+                        shasum.update(ubuf) # pylint: disable=E1101
                 outfile.write(ubuf)
 
+        if ignore_hash:
+                return
+        elif hash_funcs:
+                hexdigests = []
+                for sha in shasums:
+                        hexdigests.append(sha.hexdigest())
+                return hexdigests
         return shasum.hexdigest()
 
 class PipeError(Exception):
@@ -504,8 +542,10 @@
 
         return int(size)
 
-def get_data_digest(data, length=None, return_content=False):
-        """Returns a tuple of (SHA-1 hexdigest, content).
+def get_data_digest(data, length=None, return_content=False,
+    hash_attrs=None, hash_algs=None, hash_func=None):
+        """Returns a tuple of ({hash attribute name: hash value}, content)
+        or a tuple of (hash value, content) if 'hash_func' is supplied.
 
         'data' should be a file-like object or a pathname to a file.
 
@@ -514,7 +554,20 @@
 
         'return_content' is a boolean value indicating whether the
         second tuple value should contain the content of 'data' or
-        if the content should be discarded during processing."""
+        if the content should be discarded during processing.
+
+        'hash_attrs' is a list of keys describing the hashes we want to compute
+        for this data. The keys must be present in 'hash_algs', a dictionary
+        mapping keys to the factory methods that are used to create objects
+        to compute them. The factory method must take no parameters, and must
+        return an object that has 'update()' and 'hexdigest()' methods. In the
+        current implementation, these are all hashlib factory methods.
+
+        'hash_func' is provided as a convenience to simply hash the data with
+        a single hash algorithm. The value of 'hash_func' should be the factory
+        method used to compute that hash value, as described in the previous
+        paragraph.
+        """
 
         bufsz = 128 * 1024
         closefobj = False
@@ -527,15 +580,31 @@
         if length is None:
                 length = os.stat(data).st_size
 
-        # Read the data in chunks and compute the SHA1 hash as it comes in.  A
-        # large read on some platforms (e.g. Windows XP) may fail.
+        # Set up our results dictionary so that each attribute maps to a
+        # new hashlib object.
+        if hash_func:
+                hsh = hash_func()
+        else:
+                if hash_algs is None or hash_attrs is None:
+                        assert False, "get_data_digest without hash_attrs/algs"
+                hash_results = {}
+                for attr in hash_attrs:
+                        hash_results[attr] = hash_algs[attr]()
+
+        # Read the data in chunks and compute the SHA hashes as the data comes
+        # in.  A large read on some platforms (e.g. Windows XP) may fail.
         content = cStringIO.StringIO()
-        fhash = hashlib.sha1()
         while length > 0:
                 data = f.read(min(bufsz, length))
                 if return_content:
                         content.write(data)
-                fhash.update(data) # pylint: disable=E1101
+                if hash_func:
+                        hsh.update(data)
+                else:
+                        # update each hash with this data
+                        for attr in hash_attrs:
+                                hash_results[attr].update(
+                                    data) # pylint: disable=E1101
 
                 l = len(data)
                 if l == 0:
@@ -545,13 +614,30 @@
         if closefobj:
                 f.close()
 
-        return fhash.hexdigest(), content.read()
+        if hash_func:
+                return hsh.hexdigest(), content.read()
+
+        # The returned dictionary can now be populated with the hexdigests
+        # instead of the hashlib objects themselves.
+        for attr in hash_results:
+                hash_results[attr] = hash_results[attr].hexdigest()
+        return hash_results, content.read()
 
 def compute_compressed_attrs(fname, file_path, data, size, compress_dir,
-    bufsz=64*1024):
-        """Returns the size and hash of the compressed data.  If the file
-        located at file_path doesn't exist or isn't gzipped, it creates a file
-        in compress_dir named fname."""
+    bufsz=64*1024, chash_attrs=None, chash_algs=None):
+        """Returns the size and one or more hashes of the compressed data.  If
+        the file located at file_path doesn't exist or isn't gzipped, it creates
+        a file in compress_dir named fname.
+
+        'chash_attrs' is a list of the chash attributes we should compute, with
+        'chash_algs' being a dictionary that maps the attribute names to the
+        algorithms used to compute them.
+        """
+
+        if chash_attrs is None:
+                chash_attrs = digest.DEFAULT_CHASH_ATTRS
+        if chash_algs is None:
+                chash_algs = digest.CHASH_ALGS
 
         #
         # This check prevents compressing a file which is already compressed.
@@ -597,14 +683,18 @@
         # to generate deterministic hashes for different files with identical
         # content.
         cfile = open(opath, "rb")
-        chash = hashlib.sha1()
+        chashes = {}
+        for chash_attr in chash_attrs:
+                chashes[chash_attr] = chash_algs[chash_attr]()
         while True:
                 cdata = cfile.read(bufsz)
                 if cdata == "":
                         break
-                chash.update(cdata) # pylint: disable=E1101
+                for chash_attr in chashes:
+                        chashes[chash_attr].update(
+                            cdata) # pylint: disable=E1101
         cfile.close()
-        return csize, chash
+        return csize, chashes
 
 class ProcFS(object):
         """This class is used as an interface to procfs."""
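
A sketch of the two calling conventions get_data_digest() now supports (the
file path is arbitrary; DEFAULT_HASH_ATTRS and HASH_ALGS are the pkg.digest
tables referenced elsewhere in this changeset):

        import hashlib
        import pkg.digest as digest
        import pkg.misc as misc

        # Single-algorithm form: returns (hexdigest, content).
        hval, content = misc.get_data_digest("/etc/motd",
            hash_func=hashlib.sha1)

        # Multi-algorithm form: returns ({attr: hexdigest}, content),
        # computing every configured hash in one pass over the data.
        hashes, content = misc.get_data_digest("/etc/motd",
            hash_attrs=digest.DEFAULT_HASH_ATTRS,
            hash_algs=digest.HASH_ALGS)
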
--- a/src/modules/p5p.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/p5p.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2011, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import atexit
@@ -34,6 +34,7 @@
 import pkg
 import pkg.client.api_errors as apx
 import pkg.client.publisher
+import pkg.digest as digest
 import pkg.fmri
 import pkg.manifest
 import pkg.misc
@@ -545,7 +546,7 @@
 
                 'pathname' is an optional string specifying the absolute path
                 of a file to add to the archive.  The file may be a regular
-                file, directory, symbolic link, or hard link. 
+                file, directory, symbolic link, or hard link.
 
                 'arcname' is an optional string specifying an alternative name
                 for the file in the archive.  If not given, the full pathname
@@ -667,16 +668,26 @@
                 # payload.  (That payload can consist of multiple files.)
                 file_dir = os.path.join(pub_dir, "file")
                 for a in m.gen_actions():
-                        if not a.has_payload or not a.hash:
+                        if not a.has_payload:
                                 # Nothing to archive.
                                 continue
 
-                        payloads = set([a.hash])
+                        pref_hattr, hval, hfunc = \
+                            digest.get_least_preferred_hash(a)
+                        if not hval:
+                                # Nothing to archive.
+                                continue
+
+                        payloads = set([hval])
 
                         # Signature actions require special handling.
                         if a.name == "signature":
-                                payloads.update(a.attrs.get("chain",
-                                    "").split())
+                                pref_cattr, chain_val, func = \
+                                    digest.get_least_preferred_hash(a,
+                                    hash_type=digest.CHAIN)
+
+                                for chain in chain_val.split():
+                                        payloads.add(chain)
 
                                 if repo:
                                         # This bit of logic only possible if
@@ -1221,7 +1232,7 @@
                         # A new publisher object is created with a copy of only
                         # the information that's needed for the archive.
                         npub = pkg.client.publisher.Publisher(pub.prefix,
-                            alias=pub.alias, 
+                            alias=pub.alias,
                             revoked_ca_certs=pub.revoked_ca_certs,
                             approved_ca_certs=pub.approved_ca_certs)
 
--- a/src/modules/p5s.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/p5s.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,14 +21,14 @@
 #
 
 #
-# Copyright (c) 2011, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import copy
-import hashlib
 import os
 import pkg.client.api_errors as api_errors
 import pkg.client.publisher as publisher
+import pkg.digest as digest
 import pkg.fmri as fmri
 import simplejson as json
 import urllib
@@ -74,7 +74,7 @@
                                     system=True)]
                         res.append(r)
                 return res
-        
+
         try:
                 dump_struct = json.loads(data)
         except ValueError, e:
@@ -102,7 +102,7 @@
                         alias = p.get("alias", None)
                         prefix = p.get("name", None)
                         sticky = p.get("sticky", True)
-                        
+
                         if not prefix:
                                 prefix = "Unknown"
 
@@ -190,7 +190,8 @@
                                 # to communicate with the system repository.
                                 res.append("http://%s/%s/%s" %
                                     (publisher.SYSREPO_PROXY, prefix,
-                                    hashlib.sha1(m.uri.rstrip("/")).hexdigest()
+                                    digest.DEFAULT_HASH_FUNC(
+                                    m.uri.rstrip("/")).hexdigest()
                                     ))
                         else:
                                 assert False, "%s is an unknown scheme." % \
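
For illustration, the proxy URI built above reduces to hashing the origin
URI; DEFAULT_HASH_FUNC is assumed here (per the compatibility comments in
this changeset) to still be SHA-1:

        import hashlib

        uri = "http://pkg.example.com/release/"
        bucket = hashlib.sha1(uri.rstrip("/")).hexdigest()
        # bucket is the trailing component of http://<proxy>/<prefix>/<bucket>
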
--- a/src/modules/query_parser.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/query_parser.py	Fri Oct 11 16:09:34 2013 +0100
@@ -44,6 +44,7 @@
 from pkg.misc import EmptyI
 
 FILE_OPEN_TIMEOUT_SECS = 1
+MAX_TOKEN_COUNT = 100
 
 class QueryLexer(object):
         """This class defines the lexer used to separate parse queries into
@@ -374,6 +375,19 @@
                 return str(self)
 
 
+class QueryLengthExceeded(QueryException):
+
+        def __init__(self, token_cnt):
+                QueryException.__init__(self)
+                self.token_cnt = token_cnt
+
+        def __str__(self):
+                return _("The number of terms in the query is %(len)i, "
+                    "which exceeds the maximum supported "
+                    "value of %(maxt)i terms.") % {"len": self.token_cnt,
+                    "maxt": MAX_TOKEN_COUNT}
+
+
 class DetailedValueError(QueryException):
 
         def __init__(self, name, bad_value, whole_query):
@@ -449,6 +463,10 @@
                 The "start_point" parameter is the number of results to skip
                 before returning results to the querier."""
 
+                token_cnt = len(text.split(" "))
+                if token_cnt > MAX_TOKEN_COUNT:
+                        raise QueryLengthExceeded(token_cnt)
+
                 self.text = text
                 self.case_sensitive = case_sensitive
                 self.return_type = return_type
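
A sketch of the new guard in action (Query's positional arguments are
assumed from the docstring above):

        import pkg.query_parser as qp

        text = " ".join("term%d" % i for i in range(101))
        try:
                q = qp.Query(text, False, qp.Query.RETURN_PACKAGES,
                    None, None)
        except qp.QueryLengthExceeded as e:
                print(e)
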
--- a/src/modules/search_storage.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/search_storage.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,8 +21,7 @@
 #
 
 #
-# Copyright 2010 Sun Microsystems, Inc.  All rights reserved.
-# Use is subject to license terms.
+# Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import os
@@ -614,21 +613,25 @@
 class IndexStoreSetHash(IndexStoreBase):
         def __init__(self, file_name):
                 IndexStoreBase.__init__(self, file_name)
+                # In order to interoperate with older clients, we must use sha-1
+                # here.
                 self.hash_val = hashlib.sha1().hexdigest()
 
         def set_hash(self, vals):
                 """Set the hash value."""
-                self.hash_val = self.calc_hash(vals) 
+                self.hash_val = self.calc_hash(vals)
 
         def calc_hash(self, vals):
                 """Calculate the hash value of the sorted members of vals."""
                 vl = list(vals)
                 vl.sort()
+                # In order to interoperate with older clients, we must use sha-1
+                # here.
                 shasum = hashlib.sha1()
                 for v in vl:
                         shasum.update(v)
                 return shasum.hexdigest()
-                
+
         def write_dict_file(self, path, version_num):
                 """Write self.hash_val out to a line in a file """
                 IndexStoreBase._protected_write_dict_file(self, path,
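
The sort in calc_hash() keeps the index checksum stable regardless of the
order in which tokens were added; schematically (sample values only):

        import hashlib

        vals = ["pkg:/b@2.0", "pkg:/a@1.0"]
        shasum = hashlib.sha1()
        for v in sorted(vals):
                shasum.update(v)
        print(shasum.hexdigest())
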
--- a/src/modules/server/api.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/server/api.py	Fri Oct 11 16:09:34 2013 +0100
@@ -575,7 +575,7 @@
                         s = StringIO.StringIO()
                         lpath = self._depot.repo.file(lic.hash, pub=self._pub)
                         lfile = file(lpath, "rb")
-                        misc.gunzip_from_stream(lfile, s)
+                        misc.gunzip_from_stream(lfile, s, ignore_hash=True)
                         text = s.getvalue()
                         s.close()
                         license_lst.append(LicenseInfo(mfst.fmri, lic,
--- a/src/modules/server/depot.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/server/depot.py	Fri Oct 11 16:09:34 2013 +0100
@@ -857,7 +857,7 @@
         file_0._cp_config = { "response.stream": True }
 
         def file_1(self, *tokens):
-                """Outputs the contents of the file, named by the SHA-1 hash
+                """Outputs the contents of the file, named by the SHA hash
                 name in the request path, directly to the client."""
 
                 method = cherrypy.request.method
@@ -1320,7 +1320,8 @@
                                 continue
 
                         with file(lpath, "rb") as lfile:
-                                misc.gunzip_from_stream(lfile, lsummary)
+                                misc.gunzip_from_stream(lfile, lsummary,
+                                    ignore_hash=True)
                 lsummary.seek(0)
 
                 self.__set_response_expires("info", 86400*365, 86400*365)
@@ -2371,7 +2372,8 @@
                     cfg.PropList("address"),
                     cfg.PropDefined("cfg_file", allowed=["", "<pathname>"]),
                     cfg.Property("content_root"),
-                    cfg.PropList("debug", allowed=["", "headers"]),
+                    cfg.PropList("debug", allowed=["", "headers",
+                        "hash=sha256", "hash=sha1+sha256"]),
                     cfg.PropList("disable_ops"),
                     cfg.PropDefined("image_root", allowed=["",
                         "<abspathname>"]),
--- a/src/modules/server/query_parser.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/server/query_parser.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,12 +21,12 @@
 #
 
 #
-# Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import sys
 import pkg.query_parser as qp
+from pkg.query_parser import BooleanQueryException, ParseError, \
+    QueryException, QueryLengthExceeded
+from pkg.query_parser import BooleanQueryException, ParseError, QueryException, QueryLengthExceeded
 
 class QueryLexer(qp.QueryLexer):
         pass
--- a/src/modules/server/repository.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/server/repository.py	Fri Oct 11 16:09:34 2013 +0100
@@ -25,7 +25,6 @@
 import codecs
 import datetime
 import errno
-import hashlib
 import logging
 import os
 import os.path
@@ -44,6 +43,7 @@
 import pkg.client.progress as progress
 import pkg.client.publisher as publisher
 import pkg.config as cfg
+import pkg.digest as digest
 import pkg.file_layout.file_manager as file_manager
 import pkg.file_layout.layout as layout
 import pkg.fmri as fmri
@@ -1325,7 +1325,11 @@
 
         def file(self, fhash):
                 """Returns the absolute pathname of the file specified by the
-                provided SHA1-hash name."""
+                provided SHA-n hash name. (At present, the repository format
+                always uses the least-preferred hash to name content in order
+                remain backwards compatible with older clients. Actions may be
+                published that have additional hashes set, but those do not
+                influence where the content is stored in the repository.)"""
 
                 if not self.file_root:
                         raise RepositoryUnsupportedOperationError()
@@ -1499,23 +1503,29 @@
                         progtrack = progress.NullProgressTracker()
 
                 def get_hashes(pfmri):
-                        """Given an FMRI, return a set containing all of the
-                        hashes of the files its manifest references."""
+                        """Given an FMRI, return a set containing all of
+                        the least-preferred hashes of the files its manifest
+                        references."""
 
                         m = self._get_manifest(pfmri)
                         hashes = set()
                         for a in m.gen_actions():
-                                if not a.has_payload or not a.hash:
+                                if not a.has_payload:
                                         # Nothing to archive.
                                         continue
 
                                 # Action payload.
-                                hashes.add(a.hash)
+                                hattr, hval, hfunc = \
+                                    digest.get_least_preferred_hash(a)
+                                hashes.add(hval)
 
                                 # Signature actions have additional payloads.
                                 if a.name == "signature":
-                                        hashes.update(a.attrs.get("chain",
-                                            "").split())
+                                        chain_attr, chain_val, chain_func = \
+                                            digest.get_least_preferred_hash(a,
+                                            hash_type=digest.CHAIN)
+                                        for chain in chain_val.split():
+                                                hashes.add(chain)
                         return hashes
 
                 self.__lock_rstore()
@@ -1871,8 +1881,15 @@
                 pfmri = reason.get("pkg")
                 if hsh and pfmri:
                         m = self._get_manifest(pfmri)
+                        # This is not terribly efficient, but the expectation
+                        # is that it will rarely happen.
                         for ac in m.gen_actions_by_types(
                             actions.payload_types.keys()):
+                                for hattr in digest.DEFAULT_HASH_ATTRS:
+                                        if ac.attrs.get(hattr) == hsh:
+                                                fpath = ac.attrs.get("path")
+                                                if fpath:
+                                                        reason["fpath"] = fpath
                                 if ac.hash == hsh:
                                         fpath = ac.attrs.get("path")
                                         if fpath:
@@ -1922,24 +1939,42 @@
                 return error, path, message, reason
 
         def __get_hashes(self, path, pfmri):
-                """Given an PkgFmri, return a set containing all of the
-                hashes of the files its manifest references."""
+                """Given a PkgFmri, return a set containing tuples of all of
+                the hashes of the files its manifest references which should
+                correspond to files in the repository. Each tuple is of the form
+                (file_name, hash_value, hash_func) where hash_func is the
+                function used to compute that hash and file_name is the name
+                of the hash used to store the file in the repository."""
 
                 hashes = set()
                 errors = []
                 try:
                         m = self._get_manifest(pfmri)
                         for a in m.gen_actions():
-                                if not a.has_payload or not a.hash:
+                                if not a.has_payload:
                                         continue
 
+                                # We store files using the least preferred hash
+                                # in the repository to remain as backwards-
+                                # compatible as possible.
+                                attr, fname, hfunc = \
+                                    digest.get_least_preferred_hash(a)
+                                attr, hval, hfunc = \
+                                    digest.get_preferred_hash(a)
                                 # Action payload.
-                                hashes.add(a.hash)
+                                hashes.add((fname, hval, hfunc))
 
                                 # Signature actions have additional payloads
                                 if a.name == "signature":
-                                        hashes.update(
-                                            a.attrs.get("chain", "").split())
+                                        attr, fname, hfunc = \
+                                            digest.get_least_preferred_hash(a,
+                                            hash_type=digest.CHAIN)
+                                        attr, hval, hfunc = \
+                                            digest.get_preferred_hash(a,
+                                            hash_type=digest.CHAIN)
+                                        hashes.update([
+                                            (fname, chain, hfunc)
+                                            for chain in hval.split()])
                 except apx.PermissionsException:
                         errors.append((REPO_VERIFY_MFPERM, path,
                             {"err": _("Permission denied.")}))
@@ -1955,31 +1990,34 @@
                         return (REPO_VERIFY_PERM, path, {"err": str(e),
                             "pkg": pfmri})
 
-        def __verify_hash(self, path, pfmri, h):
-                """Perform hash verification on the given gzip file."""
+        def __verify_hash(self, path, pfmri, h, alg=digest.DEFAULT_HASH_FUNC):
+                """Perform hash verification on the given gzip file.
+                'path' is the full path to the file in the repository. 'pfmri'
+                is the package that we're verifying. 'h' is the expected hash
+                of the path. 'alg' is the hash function used to compute the
+                hash."""
 
                 gzf = None
-                hash = os.path.basename(path)
                 try:
                         gzf = PkgGzipFile(fileobj=open(path, "rb"))
-                        fhash = hashlib.sha1()
+                        fhash = alg()
                         fhash.update(gzf.read())
                         actual = fhash.hexdigest()
                         if actual != h:
                                 return (REPO_VERIFY_BADHASH, path,
-                                    {"actual": actual, "hash": hash,
+                                    {"actual": actual, "hash": h,
                                     "pkg": pfmri})
                 except (ValueError, zlib.error), e:
                         return (REPO_VERIFY_BADGZIP, path,
-                            {"hash": hash, "pkg": pfmri})
+                            {"hash": h, "pkg": pfmri})
                 except IOError, e:
                         if e.errno in [errno.EACCES, errno.EPERM]:
                                 return (REPO_VERIFY_PERM, path,
-                                    {"err": str(e), "hash": hash,
+                                    {"err": str(e), "hash": h,
                                     "pkg": pfmri})
                         else:
                                 return (REPO_VERIFY_BADGZIP, path,
-                                    {"hash": hash, "pkg": pfmri})
+                                    {"hash": h, "pkg": pfmri})
                 finally:
                         if gzf:
                                 gzf.close()
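
The stored payloads are gzip members whose uncompressed content is what
was hashed, so verification must decompress before digesting. A
standalone sketch of the same check, with the stdlib gzip module standing
in for PkgGzipFile:

    import gzip
    import hashlib

    def check_payload(path, expected, alg=hashlib.sha1):
            # Hash the uncompressed payload with the algorithm that
            # produced 'expected' and compare hex digests.
            gzf = gzip.GzipFile(filename=path, mode="rb")
            try:
                    fhash = alg()
                    fhash.update(gzf.read())
                    return fhash.hexdigest() == expected
            finally:
                    gzf.close()
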
@@ -2223,17 +2261,18 @@
 
                                 # verify payload delivered by this pkg
                                 errors = []
-                                for h in hashes:
+                                for fname, h, alg in hashes:
                                         try:
                                                 path = self.cache_store.lookup(
-                                                     h, check_existence=False)
+                                                     fname,
+                                                     check_existence=False)
                                         except apx.PermissionsException, e:
                                                 # if we can't even get the path
                                                 # within the repository, then
                                                 # we'll do the best we can to
                                                 # report the problem.
                                                 errors.append((REPO_VERIFY_PERM,
-                                                    pfmri, {"hash": h,
+                                                    pfmri, {"hash": fname,
                                                     "err": _("Permission "
                                                     "denied.", "path", h)}))
                                                 continue
@@ -2242,7 +2281,8 @@
                                         if err:
                                                 errors.append(err)
                                                 continue
-                                        err = self.__verify_hash(path, pfmri, h)
+                                        err = self.__verify_hash(path, pfmri, h,
+                                            alg=alg)
                                         if err:
                                                 errors.append(err)
                                 for err in errors:
@@ -2648,9 +2688,9 @@
                             not (os.path.exists(self.pub_root) or
                             os.path.exists(os.path.join(
                                 self.root, "pkg5.image")) and
-                            Image(self.root, augment_ta_from_parent_image=False,
-                                allow_ondisk_upgrade=False,
-                                should_exist=True).version >= 3):
+                            int(cfg.FileConfig(os.path.join(
+                                self.root, "pkg5.image")).
+                                    get_property("image", "version")) >= 3):
                                 # If this isn't a repository creation operation,
                                 # and the base configuration file doesn't exist,
                                 # this isn't a valid repository.
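
Reading the version straight out of pkg5.image avoids constructing an
Image object just to validate a repository root. As an illustration only
(cfg.FileConfig is pkg5's own configuration framework; this sketch
assumes an ini-style file and uses the stdlib parser as a stand-in):

    import ConfigParser

    def image_version(cfg_path):
            # Return the [image] section's version property as an int.
            cp = ConfigParser.SafeConfigParser()
            cp.read(cfg_path)
            return cp.getint("image", "version")
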
@@ -2921,7 +2961,7 @@
                                 pfmri = fmri.PkgFmri(pfmri, client_release)
                 except fmri.FmriError, e:
                         raise RepositoryInvalidFMRIError(e)
- 
+
                 if pub and not pfmri.publisher:
                         pfmri.publisher = pub
 
@@ -3167,7 +3207,7 @@
                                 pfmri = fmri.PkgFmri(pfmri)
                 except fmri.FmriError, e:
                         raise RepositoryInvalidFMRIError(e)
- 
+
                 if not pub and pfmri.publisher:
                         pub = pfmri.publisher
                 elif pub and not pfmri.publisher:
@@ -3205,7 +3245,7 @@
                                 pfmri = fmri.PkgFmri(pfmri, client_release)
                 except fmri.FmriError, e:
                         raise RepositoryInvalidFMRIError(e)
- 
+
                 if pub and not pfmri.publisher:
                         pfmri.publisher = pub
 
--- a/src/modules/server/transaction.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/modules/server/transaction.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2011, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import calendar
@@ -34,6 +34,7 @@
 import urllib
 
 import pkg.actions as actions
+import pkg.digest as digest
 import pkg.fmri as fmri
 import pkg.manifest
 import pkg.misc as misc
@@ -467,10 +468,22 @@
                         action.data = lambda: open(os.devnull, "rb")
 
                 if action.data is not None:
-                        fname, data = misc.get_data_digest(action.data(),
-                            length=size, return_content=True)
+                        # get all hashes for this action
+                        hashes, data = misc.get_data_digest(action.data(),
+                            length=size, return_content=True,
+                            hash_attrs=digest.DEFAULT_HASH_ATTRS,
+                            hash_algs=digest.HASH_ALGS)
 
-                        action.hash = fname
+                        # set the hash member for backwards compatibility and
+                        # remove it from the dictionary
+                        action.hash = hashes.pop("hash", None)
+                        action.attrs.update(hashes)
+
+                        # now set the hash value that will be used for storing
+                        # the file in the repository.
+                        hash_attr, hash_val, hash_func = \
+                            digest.get_least_preferred_hash(action)
+                        fname = hash_val
 
                         # Extract ELF information
                         # XXX This needs to be modularized.
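
Publication now computes every configured hash in one pass over the
payload, keeps the SHA-1 value in the legacy 'hash' member for older
clients, and names the stored file by the least preferred hash. A
condensed sketch of that flow, using only the calls shown above:

    import pkg.digest as digest
    import pkg.misc as misc

    def digest_payload(fileobj, size):
            # hashes maps hash attribute names to hex digest values.
            hashes, data = misc.get_data_digest(fileobj, length=size,
                return_content=True, hash_attrs=digest.DEFAULT_HASH_ATTRS,
                hash_algs=digest.HASH_ALGS)
            # 'hash' remains a bare action member for backwards
            # compatibility; the rest become ordinary attributes.
            legacy_sha1 = hashes.pop("hash", None)
            return legacy_sha1, hashes, data
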
@@ -487,9 +500,34 @@
                                         raise TransactionContentError(e)
 
                                 try:
-                                        elf_hash = elf.get_dynamic(
-                                            elf_name)["hash"]
-                                        action.attrs["elfhash"] = elf_hash
+                                        # Check which content checksums to
+                                        # compute and add to the action
+                                        elf256 = "pkg.content-type.sha256"
+                                        elf1 = "elfhash"
+
+                                        get_sha256 = elf256 in \
+                                            digest.DEFAULT_CONTENT_HASH_ATTRS
+                                        get_sha1 = elf1 in \
+                                            digest.DEFAULT_CONTENT_HASH_ATTRS
+
+                                        dyn = elf.get_dynamic(
+                                            elf_name, sha1=get_sha1,
+                                            sha256=get_sha256)
+
+                                        if get_sha1:
+                                                action.attrs[elf1] = dyn["hash"]
+
+                                        if get_sha256:
+                                                action.attrs[elf256] = \
+                                                    dyn[elf256]
+
                                 except elf.ElfError:
                                         pass
                                 action.attrs["elfbits"] = str(elf_info["bits"])
@@ -506,9 +544,10 @@
                                         raise
                                 dst_path = None
 
-                        csize, chash = misc.compute_compressed_attrs(
+                        csize, chashes = misc.compute_compressed_attrs(
                             fname, dst_path, data, size, self.dir)
-                        action.attrs["chash"] = chash.hexdigest()
+                        for attr in chashes:
+                                action.attrs[attr] = chashes[attr].hexdigest()
                         action.attrs["pkg.csize"] = csize
                         chash = None
                         data = None
@@ -573,14 +612,18 @@
 
         def add_file(self, f, size=None):
                 """Adds the file to the Transaction."""
-
-                fname, data = misc.get_data_digest(f, length=size,
-                    return_content=True)
+                hashes, data = misc.get_data_digest(f, length=size,
+                    return_content=True, hash_attrs=digest.DEFAULT_HASH_ATTRS,
+                    hash_algs=digest.HASH_ALGS)
 
                 if size is None:
                         size = len(data)
 
                 try:
+                        # We don't have an Action yet, so passing None is fine.
+                        default_hash_attr = digest.get_least_preferred_hash(
+                            None)[0]
+                        fname = hashes[default_hash_attr]
                         dst_path = self.rstore.file(fname)
                 except Exception, e:
                         # The specific exception can't be named here due
@@ -590,9 +633,11 @@
                                 raise
                         dst_path = None
 
-                csize, chash = misc.compute_compressed_attrs(fname, dst_path,
-                    data, size, self.dir)
-                chash = None
+                csize, chashes = misc.compute_compressed_attrs(fname, dst_path,
+                    data, size, self.dir,
+                    chash_attrs=digest.DEFAULT_CHASH_ATTRS,
+                    chash_algs=digest.CHASH_ALGS)
+                chashes = None
                 data = None
 
                 self.remaining_payload_cnt -= 1
@@ -633,7 +678,7 @@
                 # XXX If we are going to publish, then we should augment
                 # our response with any other packages that moved to
                 # PUBLISHED due to the package's arrival.
-                
+
                 self.publish_package()
 
                 if add_to_catalog:
--- a/src/pkg/manifests/package:pkg.p5m	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/pkg/manifests/package:pkg.p5m	Fri Oct 11 16:09:34 2013 +0100
@@ -121,6 +121,7 @@
 file path=$(PYDIRVP)/pkg/cpiofile.py
 file path=$(PYDIRVP)/pkg/dependency.py
 file path=$(PYDIRVP)/pkg/depotcontroller.py
+file path=$(PYDIRVP)/pkg/digest.py
 file path=$(PYDIRVP)/pkg/elf.so
 file path=$(PYDIRVP)/pkg/facet.py
 dir  path=$(PYDIRVP)/pkg/file_layout
@@ -131,7 +132,6 @@
 file path=$(PYDIRVP)/pkg/flavor/__init__.py
 file path=$(PYDIRVP)/pkg/flavor/base.py
 file path=$(PYDIRVP)/pkg/flavor/depthlimitedmf.py
-file path=$(PYDIRVP)/pkg/flavor/depthlimitedmf27.py
 file path=$(PYDIRVP)/pkg/flavor/elf.py
 file path=$(PYDIRVP)/pkg/flavor/hardlink.py
 file path=$(PYDIRVP)/pkg/flavor/python.py
--- a/src/pkgdep.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/pkgdep.py	Fri Oct 11 16:09:34 2013 +0100
@@ -88,8 +88,9 @@
 
 Subcommands:
         pkgdepend generate [-IMm] -d dir [-d dir] [-D name=value] [-k path]
-            manifest_path
-        pkgdepend [options] resolve [-dmosv] manifest ...
+            manifest_file
+        pkgdepend resolve [-EmoSv] [-d output_dir]
+            [-e external_package_file]... [-s suffix] manifest_file ...
 
 Options:
         -R dir
--- a/src/pkgrepo.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/pkgrepo.py	Fri Oct 11 16:09:34 2013 +0100
@@ -1507,6 +1507,9 @@
                                    "opt":  opt, "arg": arg })
                         DebugValues.set_value(key, value)
 
+        if DebugValues:
+                reload(pkg.digest)
+
         subcommand = None
         if pargs:
                 subcommand = pargs.pop(0)
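
This reload pattern appears in pkgrepo, publish, and sign: pkg.digest
evidently derives its DEFAULT_* tuples from DebugValues when the module
body runs, so once the -D options have been parsed the module must be
re-imported for a setting such as hash=sha1+sha256 to take effect. A
sketch of the sequence (the 'hash' key is the one the new tests use):

    import pkg.digest
    from pkg.client.debugvalues import DebugValues

    DebugValues.set_value("hash", "sha1+sha256")
    # Re-run the module body so DEFAULT_HASH_ATTRS and friends are
    # recomputed against the updated DebugValues.
    reload(pkg.digest)
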
--- a/src/publish.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/publish.py	Fri Oct 11 16:09:34 2013 +0100
@@ -326,7 +326,7 @@
         if not repo_uri:
                 usage(_("A destination package repository must be provided "
                     "using -s."), cmd="publish")
- 
+
         if not pargs:
                 filelist = [("<stdin>", sys.stdin)]
         else:
@@ -740,6 +740,8 @@
         if repo_uri and not repo_uri.startswith("null:"):
                 repo_uri = misc.parse_uri(repo_uri)
 
+        if DebugValues:
+                reload(pkg.digest)
         subcommand = None
         if pargs:
                 subcommand = pargs.pop(0)
--- a/src/pull.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/pull.py	Fri Oct 11 16:09:34 2013 +0100
@@ -813,7 +813,7 @@
                 elif not pubs_specified:
                         unknown_pubs.append(sp)
 
-        # We only print warning if the user didn't specify any valid publishers 
+        # We only print warning if the user didn't specify any valid publishers
         # to add/sync.
         if len(unknown_pubs):
                 txt = _("\nThe following publishers are present in the "
@@ -1290,7 +1290,11 @@
                                                     "rb")
                                         t.add(a)
                                         if a.name == "signature":
-                                                for fp in a.get_chain_certs():
+                                                # We always store content in the
+                                                # repository by the least-
+                                                # preferred hash.
+                                                for fp in a.get_chain_certs(
+                                                    least_preferred=True):
                                                         fname = os.path.join(
                                                             pkgdir, fp)
                                                         t.add_file(fname)
--- a/src/sign.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/sign.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,11 +21,12 @@
 #
 
 #
-# Copyright (c) 2010, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import getopt
 import gettext
+import hashlib
 import locale
 import os
 import shutil
@@ -37,11 +38,13 @@
 import pkg.actions as actions
 import pkg.client.api_errors as api_errors
 import pkg.client.transport.transport as transport
+import pkg.digest as digest
 import pkg.fmri as fmri
 import pkg.manifest as manifest
 import pkg.misc as misc
 import pkg.publish.transaction as trans
 from pkg.client import global_settings
+from pkg.client.debugvalues import DebugValues
 from pkg.misc import emsg, msg, PipeError
 import M2Crypto as m2
 
@@ -60,7 +63,7 @@
 
         if cmd:
                 text = "%s: %s" % (cmd, text)
-                
+
         else:
                 text = "%s: %s" % (PKG_CLIENT_NAME, text)
 
@@ -119,7 +122,7 @@
         global_settings.client_name = "pkgsign"
 
         try:
-                opts, pargs = getopt.getopt(sys.argv[1:], "a:c:i:k:ns:",
+                opts, pargs = getopt.getopt(sys.argv[1:], "a:c:i:k:ns:D:",
                     ["help", "no-index", "no-catalog"])
         except getopt.GetoptError, e:
                 usage(_("illegal global option -- %s") % e.opt)
@@ -162,6 +165,14 @@
                         show_usage = True
                 elif opt == "--no-catalog":
                         add_to_catalog = False
+                elif opt == "-D":
+                        try:
+                                key, value = arg.split("=", 1)
+                                DebugValues.set_value(key, value)
+                        except (AttributeError, ValueError):
+                                error(_("%(opt)s takes argument of form "
+                                            "name=value, not %(arg)s") % {
+                                            "opt":  opt, "arg": arg })
 
         if show_usage:
                 usage(retcode=EXIT_OK)
@@ -201,12 +212,15 @@
                     "certificate.  Do not use the -k or -c options with this "
                     "algorithm.") % sig_alg)
 
+        if DebugValues:
+                reload(digest)
+
         errors = []
 
         t = misc.config_temp_root()
         temp_root = tempfile.mkdtemp(dir=t)
         del t
-        
+
         cache_dir = tempfile.mkdtemp(dir=temp_root)
         incoming_dir = tempfile.mkdtemp(dir=temp_root)
         chash_dir = tempfile.mkdtemp(dir=temp_root)
@@ -292,8 +306,12 @@
                                 # comparison to existing signatures.
                                 hsh = None
                                 if cert_path:
+                                        # Action identity still uses the 'hash'
+                                        # member of the action, so we need to
+                                        # stay with the sha1 hash.
                                         hsh, _dummy = \
-                                            misc.get_data_digest(cert_path)
+                                            misc.get_data_digest(cert_path,
+                                            hash_func=hashlib.sha1)
 
                                 # Check whether the signature about to be added
                                 # is identical, or almost identical, to existing
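
Because action identity is still keyed off the SHA-1 'hash' member, the
comparison hash for a certificate must be computed with SHA-1 regardless
of which hash family is otherwise configured. A sketch:

    import hashlib
    import pkg.misc as misc

    def identity_hash(cert_path):
            # SHA-1 hex digest compared against the 'hash' member of
            # existing signature actions.
            return misc.get_data_digest(cert_path,
                hash_func=hashlib.sha1)[0]
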
--- a/src/svc/svc-pkg-mirror	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/svc/svc-pkg-mirror	Fri Oct 11 16:09:34 2013 +0100
@@ -281,6 +281,7 @@
 	# we want to pkgrecv from.
 	#
 	i=0
+	index=0
 	while [ $i -lt ${#publishers[@]} ]; do
 		pub=${publishers[$i]}
 		sslkey=$($PKG -R $ref_image publisher $pub \
@@ -291,7 +292,6 @@
 		    | $GREP -v None | $SED -e 's/.* //g')
 		$PKG -R $ref_image publisher -F tsv > /tmp/pkg.mirror.$$
 
-		index=0
 		#
 		# this function depends on the output of
 		# 'pkg publisher -F tsv'. It really ought to use
--- a/src/sysrepo.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/sysrepo.py	Fri Oct 11 16:09:34 2013 +0100
@@ -28,7 +28,6 @@
 import errno
 import getopt
 import gettext
-import hashlib
 import locale
 import logging
 import os
@@ -51,6 +50,7 @@
 import pkg.client.api
 import pkg.client.progress as progress
 import pkg.client.api_errors as apx
+import pkg.digest as digest
 import pkg.misc as misc
 import pkg.portable as portable
 import pkg.p5p as p5p
@@ -747,7 +747,7 @@
 
 def _uri_hash(uri):
         """Returns a string hash of the given URI"""
-        return hashlib.sha1(uri).hexdigest()
+        return digest.DEFAULT_HASH_FUNC(uri).hexdigest()
 
 def _chown_runtime_dir(runtime_dir):
         """Change the ownership of all files under runtime_dir to our sysrepo
--- a/src/tests/api/t_api_search.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/api/t_api_search.py	Fri Oct 11 16:09:34 2013 +0100
@@ -20,7 +20,7 @@
 # CDDL HEADER END
 #
 
-# Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
 
 import testutils
 if __name__ == "__main__":
@@ -28,11 +28,8 @@
 import pkg5unittest
 
 import copy
-import difflib
 import os
-import re
 import shutil
-import sys
 import tempfile
 import time
 import unittest
@@ -41,12 +38,10 @@
 import pkg.client.api as api
 import pkg.client.api_errors as api_errors
 import pkg.client.query_parser as query_parser
-import pkg.client.progress as progress
 import pkg.fmri as fmri
 import pkg.indexer as indexer
 import pkg.portable as portable
 import pkg.search_storage as ss
-import pkg.server.repository as srepo
 
 
 class TestApiSearchBasics(pkg5unittest.SingleDepotTestCase):
@@ -426,12 +421,24 @@
         ])
 
         res_remote_file = set([
-            ('pkg:/[email protected]',
-             'path',
-             'file a686473102ba73bd7920fc0ab1d97e00a24ed704 chash=f88920ce1f61db185d127ccb32dc8cf401ae7a83 group=bin mode=0555 owner=root path=bin/example_path pkg.csize=30 pkg.size=12'),
-            ('pkg:/[email protected]',
-             'a686473102ba73bd7920fc0ab1d97e00a24ed704',
-             'file a686473102ba73bd7920fc0ab1d97e00a24ed704 chash=f88920ce1f61db185d127ccb32dc8cf401ae7a83 group=bin mode=0555 owner=root path=bin/example_path pkg.csize=30 pkg.size=12')
+            ("pkg:/[email protected]",
+             "path",
+             "file a686473102ba73bd7920fc0ab1d97e00a24ed704 "
+             "chash=f88920ce1f61db185d127ccb32dc8cf401ae7a83 group=bin "
+             "mode=0555 owner=root path=bin/example_path pkg.csize=30 "
+             "pkg.size=12"),
+            ("pkg:/[email protected]",
+             "a686473102ba73bd7920fc0ab1d97e00a24ed704",
+             "file a686473102ba73bd7920fc0ab1d97e00a24ed704 "
+             "chash=f88920ce1f61db185d127ccb32dc8cf401ae7a83 group=bin "
+             "mode=0555 owner=root path=bin/example_path pkg.csize=30 "
+             "pkg.size=12"),
+            ("pkg:/[email protected]",
+             "hash",
+             "file a686473102ba73bd7920fc0ab1d97e00a24ed704 "
+             "chash=f88920ce1f61db185d127ccb32dc8cf401ae7a83 group=bin "
+             "mode=0555 owner=root path=bin/example_path pkg.csize=30 "
+             "pkg.size=12")
         ]) | res_remote_path
 
         res_remote_url = set([
@@ -441,15 +448,30 @@
         ])
 
         res_remote_path_extra = set([
-            ('pkg:/[email protected]',
-             'basename',
-             'file a686473102ba73bd7920fc0ab1d97e00a24ed704 chash=f88920ce1f61db185d127ccb32dc8cf401ae7a83 group=bin mode=0555 owner=root path=bin/example_path pkg.csize=30 pkg.size=12'),
-            ('pkg:/[email protected]',
-             'path',
-             'file a686473102ba73bd7920fc0ab1d97e00a24ed704 chash=f88920ce1f61db185d127ccb32dc8cf401ae7a83 group=bin mode=0555 owner=root path=bin/example_path pkg.csize=30 pkg.size=12'),
-            ('pkg:/[email protected]',
-             'a686473102ba73bd7920fc0ab1d97e00a24ed704',
-             'file a686473102ba73bd7920fc0ab1d97e00a24ed704 chash=f88920ce1f61db185d127ccb32dc8cf401ae7a83 group=bin mode=0555 owner=root path=bin/example_path pkg.csize=30 pkg.size=12')
+            ("pkg:/[email protected]",
+             "basename",
+             "file a686473102ba73bd7920fc0ab1d97e00a24ed704 "
+             "chash=f88920ce1f61db185d127ccb32dc8cf401ae7a83 group=bin "
+             "mode=0555 owner=root path=bin/example_path pkg.csize=30 "
+             "pkg.size=12"),
+            ("pkg:/[email protected]",
+             "path",
+             "file a686473102ba73bd7920fc0ab1d97e00a24ed704 "
+             "chash=f88920ce1f61db185d127ccb32dc8cf401ae7a83 group=bin "
+             "mode=0555 owner=root path=bin/example_path pkg.csize=30 "
+             "pkg.size=12"),
+            ("pkg:/[email protected]",
+             "a686473102ba73bd7920fc0ab1d97e00a24ed704",
+             "file a686473102ba73bd7920fc0ab1d97e00a24ed704 "
+             "chash=f88920ce1f61db185d127ccb32dc8cf401ae7a83 group=bin "
+             "mode=0555 owner=root path=bin/example_path pkg.csize=30 "
+             "pkg.size=12"),
+            ("pkg:/[email protected]",
+             "hash",
+             "file a686473102ba73bd7920fc0ab1d97e00a24ed704 "
+             "chash=f88920ce1f61db185d127ccb32dc8cf401ae7a83 group=bin "
+             "mode=0555 owner=root path=bin/example_path pkg.csize=30 "
+             "pkg.size=12")
         ])
 
         res_bad_pkg = set([
--- a/src/tests/api/t_elf.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/api/t_elf.py	Fri Oct 11 16:09:34 2013 +0100
@@ -20,7 +20,7 @@
 # CDDL HEADER END
 #
 
-# Copyright (c) 2008, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
 
 import testutils
 if __name__ == "__main__":
@@ -31,7 +31,6 @@
 import pkg.elf as elf
 import os
 import re
-import sys
 import pkg.portable
 
 class TestElf(pkg5unittest.Pkg5TestCase):
@@ -82,5 +81,32 @@
                         elf.get_dynamic(p)
                         elf.get_info(p)
 
+        def test_get_dynamic_params(self):
+                """Test that get_dynamic(..) returns checksums according to the
+                parameters passed to the method."""
+
+                # Check that the hashes generated have the correct length
+                # depending on the algorithm used to generate them.
+                sha1_len = 40
+                sha256_len = 64
+
+                # The default is to return a SHA-1 elfhash only.
+                d = elf.get_dynamic(self.elf_paths[0])
+                self.assert_(len(d["hash"]) == sha1_len)
+                self.assert_("pkg.content-type.sha256" not in d)
+
+                d = elf.get_dynamic(self.elf_paths[0], sha256=True)
+                self.assert_(len(d["hash"]) == sha1_len)
+                self.assert_(len(d["pkg.content-type.sha256"]) == sha256_len)
+
+                d = elf.get_dynamic(self.elf_paths[0], sha1=False, sha256=True)
+                self.assert_("hash" not in d)
+                self.assert_(len(d["pkg.content-type.sha256"]) == sha256_len)
+
+                d = elf.get_dynamic(self.elf_paths[0], sha1=False, sha256=False)
+                self.assert_("hash" not in d)
+                self.assert_("pkg.content-type.sha256" not in d)
+
+
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/api/t_manifest.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/api/t_manifest.py	Fri Oct 11 16:09:34 2013 +0100
@@ -32,6 +32,7 @@
 
 import pkg as pkg
 import pkg.client.api_errors as api_errors
+import pkg.digest as digest
 import pkg.manifest as manifest
 import pkg.misc as misc
 import pkg.actions as actions
@@ -415,15 +416,17 @@
         def test_store_to_disk(self):
                 """Verfies that a FactoredManifest gets force-loaded before it
                 gets stored to disk."""
- 
+
                 m1 = manifest.FactoredManifest("[email protected]", self.cache_dir,
                     pathname=self.foo_content_p5m)
 
                 tmpdir = tempfile.mkdtemp(dir=self.test_root)
                 path = os.path.join(tmpdir, "manifest.p5m")
                 m1.store(path)
-                self.assertEqual(misc.get_data_digest(path),
-                    misc.get_data_digest(self.foo_content_p5m))
+                self.assertEqual(misc.get_data_digest(path,
+                    hash_func=digest.DEFAULT_HASH_FUNC),
+                    misc.get_data_digest(self.foo_content_p5m,
+                    hash_func=digest.DEFAULT_HASH_FUNC))
 
         def test_get_directories(self):
                 """Verifies that get_directories() works as expected."""
--- a/src/tests/api/t_p5p.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/api/t_p5p.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2011, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import testutils
@@ -31,10 +31,12 @@
 
 import difflib
 import errno
+import hashlib
 import unittest
 import os
 import pkg.catalog
 import pkg.client.progress
+import pkg.digest as digest
 import pkg.fmri
 import pkg.misc
 import pkg.p5p
@@ -632,18 +634,25 @@
                 arc = pkg.p5p.Archive(arc_path, mode="r",
                     archive_index=archive_index)
 
+                # We always store content using the least_preferred hash, so
+                # determine what that is so that we can verify it using
+                # gunzip_from_stream.
+                hash_func = digest.get_least_preferred_hash(None)[2]
+
                 # Test behaviour when specifying publisher.
                 nullf = open(os.devnull, "wb")
                 for h in hashes["test"]:
                         fobj = arc.get_package_file(h, pub="test")
-                        uchash = pkg.misc.gunzip_from_stream(fobj, nullf)
+                        uchash = pkg.misc.gunzip_from_stream(fobj, nullf,
+                            hash_func=hash_func)
                         self.assertEqual(uchash, h)
                         fobj.close()
 
                 # Test behaviour when not specifying publisher.
                 for h in hashes["test"]:
                         fobj = arc.get_package_file(h)
-                        uchash = pkg.misc.gunzip_from_stream(fobj, nullf)
+                        uchash = pkg.misc.gunzip_from_stream(fobj, nullf,
+                            hash_func=hash_func)
                         self.assertEqual(uchash, h)
                         fobj.close()
 
@@ -723,7 +732,8 @@
                 arc.add_repo_package(self.quux, repo)
                 arc.close()
 
-                # Get list of file hashes.
+                # Get list of file hashes. These will be the "least-preferred"
+                # hash for the actions being stored.
                 hashes = { "all": set() }
                 for rstore in repo.rstores:
                         for dirpath, dirnames, filenames in os.walk(
--- a/src/tests/api/t_pkglint.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/api/t_pkglint.py	Fri Oct 11 16:09:34 2013 +0100
@@ -20,7 +20,7 @@
 # CDDL HEADER END
 #
 
-# Copyright (c) 2010, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
 
 import testutils
 if __name__ == "__main__":
@@ -2075,6 +2075,24 @@
 depend type=incorporate fmri=pkg:/some/package
 """
 
+expected_failures["facetvalue-invalid.mf"] = ["pkglint.action012"]
+broken_manifests["facetvalue-invalid.mf"] = \
+"""
+#
+# Intentionally set facet into a value other than 'true', 'false' or 'all'
+#
+set name=pkg.fmri value=pkg://opensolaris.org/pkglint/[email protected],1.0
+set name=org.opensolaris.consolidation value=osnet
+set name=variant.opensolaris.zone value=global value=nonglobal
+set name=pkg.description value="A pkglint test"
+set name=pkg.summary value="Yet another test"
+set name=variant.arch value=i386 value=sparc
+set name=info.classification value=org.opensolaris.category.2008:System/Packaging
+link path=usr/bin/perl target=usr/perl5/5.6/bin/perl mediator=perl mediator-version=5.6
+link path=usr/bin/perl target=usr/perl5/5.12/bin/perl mediator=perl mediator-version=5.12
+file path=usr/perl5/5.6/bin/perl facet.doc.man=other owner=root group=sys mode=0755
+file path=usr/perl5/5.12/bin/perl owner=root group=sys mode=0755
+"""
 
 class TestLogFormatter(log.LogFormatter):
         """Records log messages to a buffer"""
--- a/src/tests/cli/t_depot_config.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_depot_config.py	Fri Oct 11 16:09:34 2013 +0100
@@ -336,6 +336,8 @@
 
                 fmris = self.pkgsend_bulk(self.dcs[1].get_repo_url(),
                     self.new_pkg)
+                r2_fmris = self.pkgsend_bulk(self.dcs[2].get_repo_url(),
+                    self.sample_pkg)
                 self.depotconfig("")
                 self.start_depot()
 
@@ -361,18 +363,70 @@
                         "/depot/default/en/search.shtml?token=pkg&action=Search"
                 ]
 
-                for p in paths:
-                        url_path = "%s%s" % (self.ac.url, p % conf)
+                def get_url(url_path):
                         try:
-                                url_obj = urllib2.urlopen(url_path)
+                                url_obj = urllib2.urlopen(url_path, timeout=10)
                                 self.assert_(url_obj.code == 200,
                                     "Failed to open %s: %s" % (url_path,
                                     url_obj.code))
+                                url_obj.close()
                         except urllib2.HTTPError, e:
                                 self.debug("Failed to open %s: %s" %
                                     (url_path, e))
                                 raise
 
+                for p in paths:
+                        get_url("%s%s" % (self.ac.url, p % conf))
+
+                self.ac.stop()
+
+                # test that pkg.depot-config detects missing repos
+                broken_rdir = self.rdir2 + "foo"
+                os.rename(self.rdir2, broken_rdir)
+                self.depotconfig("", exit=1)
+
+                # test that when we break one of the repositories we're
+                # serving, the remaining repositories are still accessible
+                # from the BUI. We need to fix the repo dir before rebuilding
+                # the configuration, then break it once the depot has started.
+                os.rename(broken_rdir, self.rdir2)
+                self.depotconfig("")
+                os.rename(self.rdir2, broken_rdir)
+                self.start_depot(build_indexes=False)
+
+                # check the first request to the BUI works as expected
+                get_url(self.ac.url)
+
+                # and check that we get a 404 for the missing repo
+                bad_url = "%s/usr/test2/en/catalog.shtml" % self.ac.url
+                raised_404 = False
+                try:
+                        url_obj = urllib2.urlopen(bad_url, timeout=10)
+                        url_obj.close()
+                except urllib2.HTTPError, e:
+                        if e.code == 404:
+                                raised_404 = True
+                self.assert_(raised_404, "Didn't get a 404 opening %s" %
+                    bad_url)
+
+                # check that we can still reach other valid paths
+                paths = [
+                        "/",
+                        "/default/test1",
+                        "/default/en",
+                        "/default/en/index.shtml",
+                        "/default/en/catalog.shtml",
+                        "/default/p5i/0/new.p5i",
+                        "/default/info/0/%(esc_full_fmri)s",
+                        "/default/test1/info/0/%(esc_full_fmri)s",
+                        "/default/manifest/0/%(esc_full_fmri)s",
+                        "/default/en/search.shtml",
+                ]
+                for p in paths:
+                        self.debug(p)
+                        get_url("%s%s" % (self.ac.url, p % conf))
+                os.rename(broken_rdir, self.rdir2)
+
         def test_12_htpkgclient(self):
                 """A depot-config can act as a repository server for pkg(1)
                 clients, with all functionality supported."""
--- a/src/tests/cli/t_https.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_https.py	Fri Oct 11 16:09:34 2013 +0100
@@ -23,13 +23,13 @@
 #
 # Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
 #
-import sys
 
 import testutils
 if __name__ == "__main__":
         testutils.setup_environment("../../../proto")
 import pkg5unittest
 
+import hashlib
 import os
 import shutil
 import stat
@@ -165,14 +165,14 @@
                 """ Test that an expired cert for one publisher doesn't prevent
                making changes to other publishers due to certificate checks on
                 all configured publishers. (Bug 17018362)"""
-                
+
                 bad_cert_path = os.path.join(self.cs_dir,
                     "cs3_ch1_ta3_cert.pem")
                 good_cert_path = os.path.join(self.cs_dir,
                     self.get_cli_cert("test"))
                 self.ac.start()
                 self.image_create()
-                
+
                 # Set https-based publisher with correct cert.
                 self.seed_ta_dir("ta7")
                 self.pkg("set-publisher -k %(key)s -c %(cert)s -p %(url)s" % {
@@ -186,10 +186,12 @@
                 # Replace cert of first publisher with one that is expired.
                 # It doesn't need to match the key because we just want to
                 # test if the cert validation code works correctly so we are not
-                # actually using the cert. 
+                # actually using the cert.
 
-                # Cert is stored by content hash in the pkg config of the image.
-                ch = misc.get_data_digest(good_cert_path)[0]
+                # Cert is stored by content hash in the pkg config of the image,
+                # which must be a SHA-1 hash for backwards compatibility.
+                ch = misc.get_data_digest(good_cert_path,
+                    hash_func=hashlib.sha1)[0]
                 pkg_cert_path = os.path.join(self.get_img_path(), "var", "pkg",
                     "ssl", ch)
                 shutil.copy(bad_cert_path, pkg_cert_path)
--- a/src/tests/cli/t_pkg_info.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkg_info.py	Fri Oct 11 16:09:34 2013 +0100
@@ -75,6 +75,12 @@
             close
         """
 
+        human2 = """
+            open [email protected],5.11-0:20110908T004546Z
+            add set name=pkg.human-version value=0.9.8.18
+            close
+        """
+
         misc_files = [ "tmp/bronzeA1",  "tmp/bronzeA2", "tmp/bronze1",
             "tmp/bronze2", "tmp/copyright1", "tmp/copyright0", "tmp/sh",
             "tmp/baz"]
@@ -92,7 +98,8 @@
                 pkg5unittest.SingleDepotTestCase.setUp(self)
                 self.make_misc_files(self.misc_files)
                 self.plist = self.pkgsend_bulk(self.rurl, (self.badfile10,
-                    self.baddir10, self.bronze10, self.bronze05, self.human))
+                    self.baddir10, self.bronze10, self.bronze05, self.human,
+                    self.human2))
 
         def test_pkg_info_bad_fmri(self):
                 """Test bad frmi's with pkg info."""
@@ -313,11 +320,16 @@
 
         def test_human_version(self):
                 """Verify that info returns the expected output for packages
-                with a human-readable version defined."""
+                with a human-readable version defined. If it is identical to
+                the version number, only the version number is displayed."""
 
                 self.image_create(self.rurl)
                 self.pkg("info -r human | grep 'Version: 0.9.8.18 (0.9.8r)'")
 
+                # Verify that the human-readable version is not displayed
+                # when it is identical to the version number.
+                self.pkg("info -r human2 | grep 'Version: 0.9.8.18$'")
+
         def test_ranked(self):
                 """Verify that pkg info -r returns expected results when
                 multiple publishers provide the same package based on
--- a/src/tests/cli/t_pkg_install.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkg_install.py	Fri Oct 11 16:09:34 2013 +0100
@@ -1915,6 +1915,7 @@
             add dir mode=0755 owner=root group=bin path=etc
             add file tmp/config1 mode=0644 owner=root group=bin path=etc/foo
             add hardlink path=etc/foo.link target=foo
+            add license tmp/copyright1 license=copyright
             close
         """
         iron20 = """
@@ -1922,6 +1923,7 @@
             add dir mode=0755 owner=root group=bin path=etc
             add file tmp/config2 mode=0644 owner=root group=bin path=etc/foo
             add hardlink path=etc/foo.link target=foo
+            add license tmp/copyright2 license=copyright
             close
         """
 
@@ -3062,6 +3064,53 @@
                 self.pkg("update [email protected]")
                 self.file_contains(new_cfg_path, "preserve2")
 
+        def test_many_hashalgs(self):
+                """Test that when upgrading actions where the new action
+                contains more hash attributes than the old action, that the
+                upgrade works."""
+
+                self.pkgsend_bulk(self.rurl, self.iron10)
+                self.image_create(self.rurl, destroy=True)
+                self.pkg("install [email protected]")
+                self.pkg("contents -m iron")
+                # We have not enabled SHA2 hash publication yet.
+                self.assert_("pkg.hash.sha256" not in self.output)
+
+                # publish with SHA1 and SHA2 hashes
+                self.pkgsend_bulk(self.rurl, self.iron20,
+                    debug_hash="sha1+sha256")
+
+                # verify that a non-SHA2 aware client can install these bits
+                self.pkg("-D hash=sha1 update")
+                self.image_create(self.rurl, destroy=True)
+
+                # This also tests package retrieval: we always retrieve packages
+                # with the least-preferred hash, but verify with the
+                # most-preferred hash.
+                self.pkg("install [email protected]")
+                self.pkg("contents -m iron")
+                self.assert_("pkg.hash.sha256" in self.output)
+
+                # publish with only SHA-2 hashes
+                self.pkgsend_bulk(self.rurl, self.iron20, debug_hash="sha256")
+
+                # verify that a non-SHA2 aware client cannot install these bits
+                # since there are no SHA1 hashes present
+                self.pkg("-D hash=sha1 update", exit=1)
+                self.assert_(
+                    "No file could be found for the specified hash name: "
+                    "'NOHASH'" in self.errout)
+
+                # Make sure we've been publishing only with sha256 by removing
+                # those known attributes, then checking for the presence of
+                # the SHA-1 attributes.
+                self.pkg("-D hash=sha256 update")
+                self.pkg("contents -m iron")
+                for attr in ["pkg.hash.sha256", "pkg.chash.sha256"]:
+                        self.output = self.output.replace(attr, "")
+                self.assert_("hash" not in self.output)
+                self.assert_("chash" not in self.output)
+
 
 class TestPkgInstallActions(pkg5unittest.SingleDepotTestCase):
         # Only start/stop the depot once (instead of for every test)
--- a/src/tests/cli/t_pkg_publisher.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkg_publisher.py	Fri Oct 11 16:09:34 2013 +0100
@@ -29,6 +29,7 @@
         testutils.setup_environment("../../../proto")
 import pkg5unittest
 
+import hashlib
 import os
 import pkg.client.image as image
 import pkg.misc
@@ -160,8 +161,12 @@
                     exit=2)
 
                 # Listing publishers should succeed even if key file is gone.
+                # This test relies on using the same implementation used in
+                # image.py __store_publisher_ssl() which sets the paths to the
+                # SSL keys/certs.
                 img_key_path = os.path.join(self.img_path(), "var", "pkg",
-                    "ssl", pkg.misc.get_data_digest(key_path)[0])
+                    "ssl", pkg.misc.get_data_digest(key_path,
+                    hash_func=hashlib.sha1)[0])
                 os.unlink(img_key_path)
                 self.pkg("publisher test1")
 
@@ -187,7 +192,8 @@
 
                 # Listing publishers should be possible if cert file is gone.
                 img_cert_path = os.path.join(self.img_path(), "var", "pkg",
-                    "ssl", pkg.misc.get_data_digest(cert_path)[0])
+                    "ssl", pkg.misc.get_data_digest(cert_path,
+                    hash_func=hashlib.sha1)[0])
                 os.unlink(img_cert_path)
                 self.pkg("publisher test1", exit=3)
 
@@ -311,10 +317,15 @@
                 self.pkg("set-publisher --no-refresh -c %s test1" % cert_path)
                 self.pkg("set-publisher --no-refresh -k %s test1" % key_path)
 
+                # This test relies on using the same implementation used in
+                # image.py __store_publisher_ssl() which sets the paths to the
+                # SSL keys/certs.
                 img_key_path = os.path.join(self.img_path(), "var", "pkg",
-                    "ssl", pkg.misc.get_data_digest(key_path)[0])
+                    "ssl", pkg.misc.get_data_digest(key_path,
+                    hash_func=hashlib.sha1)[0])
                 img_cert_path = os.path.join(self.img_path(), "var", "pkg",
-                    "ssl", pkg.misc.get_data_digest(cert_path)[0])
+                    "ssl", pkg.misc.get_data_digest(cert_path,
+                    hash_func=hashlib.sha1)[0])
 
                 # Make the cert/key unreadable by unprivileged users.
                 os.chmod(img_key_path, 0000)
@@ -881,10 +892,15 @@
                     (key_path, cert_path))
                 self.pkg("publisher test1")
 
+                # This test relies on using the same implementation used in
+                # image.py __store_publisher_ssl() which sets the paths to the
+                # SSL keys/certs.
                 img_key_path = os.path.join(self.img_path(), "var", "pkg",
-                    "ssl", pkg.misc.get_data_digest(key_path)[0])
+                    "ssl", pkg.misc.get_data_digest(key_path,
+                    hash_func=hashlib.sha1)[0])
                 img_cert_path = os.path.join(self.img_path(), "var", "pkg",
-                    "ssl", pkg.misc.get_data_digest(cert_path)[0])
+                    "ssl", pkg.misc.get_data_digest(cert_path,
+                    hash_func=hashlib.sha1)[0])
                 self.assert_(img_key_path in self.output)
                 self.assert_(img_cert_path in self.output)
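
The same derivation appears in several of these tests; spelling it out
once shows the scheme they all assume: keys and certificates live under
var/pkg/ssl inside the image, named by the SHA-1 content hash that
image.py's __store_publisher_ssl() uses. A sketch (img_path is the image
root, pem_path the key or certificate):

    import hashlib
    import os
    import pkg.misc

    def img_ssl_path(img_path, pem_path):
            # Path at which the image stores its copy of this key/cert.
            ch = pkg.misc.get_data_digest(pem_path,
                hash_func=hashlib.sha1)[0]
            return os.path.join(img_path, "var", "pkg", "ssl", ch)
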
 
--- a/src/tests/cli/t_pkg_refresh.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkg_refresh.py	Fri Oct 11 16:09:34 2013 +0100
@@ -20,7 +20,7 @@
 # CDDL HEADER END
 #
 
-# Copyright (c) 2008, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
 
 import testutils
 if __name__ == "__main__":
@@ -28,6 +28,7 @@
 import pkg5unittest
 
 import difflib
+import hashlib
 import os
 import re
 import shutil
@@ -331,10 +332,15 @@
                 self.pkg("set-publisher --no-refresh -k %s test1" % key_path)
 
 
+                # This test relies on using the same implementation used in
+                # image.py __store_publisher_ssl() which sets the paths to the
+                # SSL keys/certs.
                 img_key_path = os.path.join(self.img_path(), "var", "pkg",
-                    "ssl", pkg.misc.get_data_digest(key_path)[0])
+                    "ssl", pkg.misc.get_data_digest(key_path,
+                    hash_func=hashlib.sha1)[0])
                 img_cert_path = os.path.join(self.img_path(), "var", "pkg",
-                    "ssl", pkg.misc.get_data_digest(cert_path)[0])
+                    "ssl", pkg.misc.get_data_digest(cert_path,
+                    hash_func=hashlib.sha1)[0])
 
                 # Make the cert/key unreadable by unprivileged users.
                 os.chmod(img_key_path, 0000)
--- a/src/tests/cli/t_pkg_revert.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkg_revert.py	Fri Oct 11 16:09:34 2013 +0100
@@ -43,10 +43,7 @@
             add dir mode=0755 owner=root group=bin path=etc
             add file etc/file1 mode=0555 owner=root group=bin path=etc/file1
             close
-            open [email protected],5.11-0
-            add dir mode=0755 owner=root group=bin path=etc
-            add file etc/file2 mode=0555 owner=root group=bin path=etc/file2 revert-tag=bob
-            close
+            # [email protected] is published as part of pkgs2
             open [email protected],5.11-0
             add dir mode=0755 owner=root group=bin path=etc
             add file etc/file3 mode=0555 owner=root group=bin path=etc/file3 revert-tag=bob revert-tag=ted
@@ -80,6 +77,14 @@
             open [email protected],5.11-0
             add dir mode=0755 owner=root group=bin path=etc/y-dir revert-tag=bob=*
             close
+            """
+
+        # A set of packages that we publish with additional hash attributes
+        pkgs2 = """
+            open [email protected],5.11-0
+            add dir mode=0755 owner=root group=bin path=etc
+            add file etc/file2 mode=0555 owner=root group=bin path=etc/file2 revert-tag=bob
+            close
             open [email protected],5.11-0
             add dir mode=0755 owner=root group=bin path=dev revert-tag=init-dev=*
             add dir mode=0755 owner=root group=bin path=dev/cfg revert-tag=init-dev=*
@@ -174,6 +179,8 @@
                 self.make_misc_files(self.misc_files)
                 self.make_misc_files(self.additional_files)
                 self.plist = self.pkgsend_bulk(self.rurl, self.pkgs)
+                self.plist.extend(self.pkgsend_bulk(self.rurl, self.pkgs2,
+                    debug_hash="sha1+sha256"))
 
         def test_revert(self):
                 self.image_create(self.rurl)
@@ -184,7 +191,17 @@
                 self.damage_all_files()
                 # make sure we broke 'em
                 self.pkg("verify A", exit=1)
+
+                # We expect that the SHA-2 hash is used whenever there are SHA-2
+                # hashes on the action. Even though this client is run in
+                # "SHA-1" mode as well as "SHA-2" mode, we always verify with
+                # the most-preferred hash available.
+                self.pkg("-D hash=sha1+sha256 verify B", exit=1)
+                sha2 = "e3868252b2b2de64e85f5b221e46eb23c428fe5168848eb36d113c66628131ce"
+                self.assert_(sha2 in self.output)
                 self.pkg("verify B", exit=1)
+                self.assert_(sha2 in self.output)
+
                 self.pkg("verify C", exit=1)
                 self.pkg("verify D", exit=1)
 
@@ -214,11 +231,21 @@
 
                 # revert damage to B, C, D by tag and test the parsable output.
                 self.pkg("revert -n --parsable=0 --tagged bob")
+                self.debug("\n".join(self.plist))
                 self.assertEqualParsable(self.output,
-                    affect_packages=[self.plist[1], self.plist[2], self.plist[3]])
-                self.pkg("revert --parsable=0 --tagged bob")
+                    affect_packages=[self.plist[10], self.plist[1],
+                    self.plist[2]])
+                # When reverting damage, we always verify using the
+                # most-preferred hash, but retrieve content with the
+                # least-preferred hash: -D hash=sha1+sha256 should have no
+                # effect here whatsoever, but -D hash=sha256 should fail because
+                # our repository stores its files by the SHA1 hash.
+                self.pkg("-D hash=sha256 revert --parsable=0 --tagged bob",
+                    exit=1)
+                self.pkg("-D hash=sha1+sha256 revert --parsable=0 --tagged bob")
                 self.assertEqualParsable(self.output,
-                    affect_packages=[self.plist[1], self.plist[2], self.plist[3]])
+                    affect_packages=[self.plist[10], self.plist[1],
+                    self.plist[2]])
                 self.pkg("verify A", exit=1)
                 self.pkg("verify B")
                 self.pkg("verify C")
--- a/src/tests/cli/t_pkg_search.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkg_search.py	Fri Oct 11 16:09:34 2013 +0100
@@ -30,6 +30,7 @@
 import pkg5unittest
 
 import copy
+import hashlib
 import os
 import shutil
 import sys
@@ -65,7 +66,7 @@
             add dir mode=0755 owner=root group=bin path=/bin
             add file tmp/example_file mode=0555 owner=root group=bin path=/bin/example_path11
             close """
-        
+
         incorp_pkg10 = """
             open [email protected],5.11-0
             add depend [email protected],5.11-0 type=incorporate
@@ -295,7 +296,8 @@
 
         res_remote_file = set([
             'path       file      bin/example_path          pkg:/[email protected]\n',
-            'b40981aab75932c5b2f555f50769d878e44913d7 file      bin/example_path          pkg:/[email protected]\n'
+            'b40981aab75932c5b2f555f50769d878e44913d7 file      bin/example_path          pkg:/[email protected]\n',
+            'hash                                     file   bin/example_path pkg:/[email protected]\n'
         ]) | res_remote_path
 
 
@@ -308,7 +310,8 @@
              headers,
              'path       file      bin/example_path          pkg:/[email protected]\n',
              'basename   file      bin/example_path          pkg:/[email protected]\n',
-             'b40981aab75932c5b2f555f50769d878e44913d7 file      bin/example_path          pkg:/[email protected]\n'
+             'b40981aab75932c5b2f555f50769d878e44913d7 file      bin/example_path          pkg:/[email protected]\n',
+             'hash                                     file   bin/example_path pkg:/[email protected]\n'
         ])
 
         o_headers = \
@@ -336,6 +339,14 @@
         res_pkg_options_remote = set([pkg_headers, pkg_results])
         res_pkg_options_local = set([pkg_headers, pkg_results_no_pub])
 
+        # Create a query string in which the number of terms is > 100.
+        large_query = "a b c d e f g h i j k l m n o p q r s t u v w x y z " \
+                      "a b c d e f g h i j k l m n o p q r s t u v w x y z " \
+                      "a b c d e f g h i j k l m n o p q r s t u v w x y z " \
+                      "a b c d e f g h i j k l m n o p q r s t u v w x y z " \
+                      "a b c d e f g h i j k l m n o p q r s t u v w x y z " \
+                      "a b c d e f g h i j k l m n o p q r s t u v w x y z"
+
         def setUp(self):
                 # This test needs an actual depot for now.
                 pkg5unittest.SingleDepotTestCase.setUp(self, start_depot=True)
@@ -468,6 +479,9 @@
                 self.pkg("search -a -r -I ':set:pkg.fmri:exAMple_pkg'", exit=1)
                 self.assert_(self.errout == "" )
 
+                self.pkg("search -a -r %s" %self.large_query, exit=1)
+                self.assert_(self.errout != "") 
+
         def _run_local_tests(self):
                 outfile = os.path.join(self.test_root, "res")
 
@@ -542,6 +556,9 @@
                 self.pkg("search -a -l 'e* OR <e*>'", exit=1)
                 self._search_op(False, "pkg:/example_path", self.res_local_path)
 
+                self.pkg("search -a -l %s" %self.large_query, exit=1)
+                self.assert_(self.errout != "") 
+
         def _run_local_empty_tests(self):
                 self.pkg("search -a -l example_pkg", exit=1)
                 self.pkg("search -a -l example_path", exit=1)
@@ -978,6 +995,7 @@
                 self.assertEqualDiff(expected, actual)
                 self.pkg("search example_path", exit=1)
 
+
 class TestSearchMultiPublisher(pkg5unittest.ManyDepotTestCase):
 
         same_pub1 = """
@@ -1021,11 +1039,20 @@
         }
 
         def setUp(self):
-                pkg5unittest.ManyDepotTestCase.setUp(self,["samepub", "samepub"],
-                    start_depots=True)
+                pkg5unittest.ManyDepotTestCase.setUp(self, ["samepub",
+                    "samepub"], start_depots=True)
                 self.make_misc_files(self.misc_files)
                 self.durl1 = self.dcs[1].get_depot_url()
+                self.pkgsend_bulk(self.durl1, self.same_pub1, refresh_index=True)
                 self.durl2 = self.dcs[2].get_depot_url()
+                self.rurl2 = self.dcs[2].get_repo_url()
+                # Our 2nd depot gets the package published with multiple hash
+                # attributes, but served from a single-hash-aware depot.
+                # (The fact that it's single-hash-aware should make no
+                # difference to the content it serves, so long as the index
+                # was generated while we were aware of multiple hashes.)
+                self.pkgsend_bulk(self.rurl2, self.same_pub2,
+                    refresh_index=True, debug_hash="sha1+sha256")
 
         def test_7140657(self):
                 """ Check that pkg search with -s works as intended when there are
@@ -1083,6 +1110,54 @@
                 expected = self.reduceSpaces(expected_out2)
                 self.assertEqualDiff(expected, actual)
 
+        def test_search_multi_hash(self):
+                """Check that when searching a repository with multiple
+                hashes, all hash attributes are indexed and we can search
+                against all hash attributes.
+
+                This test depends on pkg.digest having DebugValue settings
+                that add sha256 hashes to the set of hashes we append to
+                actions at publication time."""
+
+                self.image_create(self.durl2, prefix="samepub")
+
+                # manually calculate the hashes, in case of bugs in
+                # pkg.misc.get_data_digest
+                sha1_hash = hashlib.sha1("magic").hexdigest()
+                sha2_hash = hashlib.sha256("magic").hexdigest()
+
+                self.pkg("search %s" % sha1_hash)
+                self.pkg("search %s" % sha2_hash)
+
+                # Check that we're matching on the correct index.
+                # For sha1 hashes, the 'index' returned is actually the
+                # hash itself - that seems unusual, but it's the way the
+                # index was built. We also emit a 2nd search result that shows
+                # 'hash', in order to be consistent with the way we print
+                # the pkg.hash.sha* attribute when dealing with other hashes.
+                self.pkg("search -H -o search.match_type %s" % sha1_hash)
+                self.assertEqualDiff(
+                    self.reduceSpaces(self.output), "%s\nhash\n" % sha1_hash)
+
+                self.pkg("search -H -o search.match_type %s" % sha2_hash)
+                self.assertEqualDiff(
+                    self.reduceSpaces(self.output), "pkg.hash.sha256\n")
+
+                # check that both searches match the same action
+                self.pkg("search -o action.raw %s" % sha1_hash)
+                sha1_action = self.reduceSpaces(self.output)
+
+                self.pkg("search -o action.raw %s" % sha2_hash)
+                sha2_action = self.reduceSpaces(self.output)
+                self.assertEqualDiff(sha1_action, sha2_action)
+
+                # check that the same searches in the non-multihash-aware
+                # repository only return a result for the sha-1 hash
+                # (which checks that we're only setting multiple hashes
+                # on actions when hash=sha1+sha256 is set)
+                self.pkg("search -s %s %s" % (self.durl1, sha1_hash))
+                self.pkg("search -s %s %s" % (self.durl1, sha2_hash), exit=1)
+
 
 if __name__ == "__main__":
         unittest.main()
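
test_search_multi_hash depends on publication adding one attribute per
algorithm named in -D hash=. A hedged sketch of that mapping (the attribute
names match the ones asserted above; the table and helper are illustrative,
not pkg.digest's implementation):

    import hashlib

    # Assumed mapping from -D hash= values to (attribute, constructor) pairs.
    HASH_MODES = {
            "sha1": [("hash", hashlib.sha1)],
            "sha256": [("pkg.hash.sha256", hashlib.sha256)],
            "sha1+sha256": [("hash", hashlib.sha1),
                ("pkg.hash.sha256", hashlib.sha256)],
    }

    def publication_hashes(data, mode="sha1"):
            """Return the hash attributes a publisher in 'mode' would set."""
            return dict((attr, func(data).hexdigest())
                for attr, func in HASH_MODES[mode])

Indexing every attribute produced this way is what lets the sha1 digest of
"magic" and its sha256 digest match the same action in the test.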
--- a/src/tests/cli/t_pkg_sysrepo.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkg_sysrepo.py	Fri Oct 11 16:09:34 2013 +0100
@@ -30,16 +30,12 @@
 import pkg5unittest
 
 import copy
-import hashlib
 import os
 import shutil
-import signal
-import sys
-import time
 
-import pkg.client.api as api
 import pkg.client.api_errors as apx
 import pkg.client.transport.exception as tx
+import pkg.digest as digest
 import pkg.misc as misc
 
 class PC(object):
@@ -86,13 +82,22 @@
 
         bar10 = """
             open [email protected],5.11-0
+            add file tmp/example_two mode=0555 owner=root group=bin path=/usr/bin/example_path3
             close"""
 
         bar11 = """
             open [email protected],5.11-0
+            add file tmp/example_two mode=0555 owner=root group=bin path=/usr/bin/example_path3
+            add file tmp/example_two mode=0555 owner=root group=bin path=/usr/bin/example_path4
             close"""
 
-        misc_files = ["tmp/example_file"]
+        baz10 = """
+            open [email protected],5.11-0
+            add file tmp/example_three mode=0555 owner=root group=bin path=/usr/bin/another
+            close"""
+
+        misc_files = ["tmp/example_file", "tmp/example_two",
+            "tmp/example_three"]
 
         expected_all_access =  """\
 PUBLISHER\tSTICKY\tSYSPUB\tENABLED\tTYPE\tSTATUS\tURI\tPROXY
@@ -127,6 +132,14 @@
                 self.durl1 = self.dcs[1].get_depot_url()
                 self.durl2 = self.dcs[2].get_depot_url()
                 self.durl3 = self.dcs[3].get_depot_url()
+
+                # We make self.durl3 multi-hash aware, to ensure that the
+                # system repository can serve packages published with multiple
+                # hashes.
+                self.dcs[3].stop()
+                self.dcs[3].set_debug_feature("hash=sha1+sha256")
+                self.dcs[3].start()
+
                 self.durl4 = self.dcs[4].get_depot_url()
                 self.durl5 = self.dcs[5].get_depot_url()
 
@@ -142,7 +155,11 @@
 
                 self.pkgsend_bulk(self.rurl1, self.example_pkg10)
                 self.pkgsend_bulk(self.rurl2, self.foo10)
-                self.pkgsend_bulk(self.rurl3, self.bar10)
+                # We send to rurl3 using multi-hash aware publication
+                self.pkgsend_bulk(self.rurl3, self.bar10,
+                    debug_hash="sha1+sha256")
+                self.pkgsend_bulk(self.rurl3, self.baz10,
+                    debug_hash="sha1+sha256")
                 self.pkgsend_bulk(self.rurl4, self.bar10)
                 self.pkgsend_bulk(self.rurl5, self.foo11)
 
@@ -593,6 +610,11 @@
                 # Test that the current api object has the right catalog.
                 self._api_install(api_obj, ["foo", "bar"])
 
+                # Test that we can install a multi-hash package
+                self.pkg("install baz")
+                self.pkg("contents -m baz")
+                self.assert_("pkg.hash.sha256" in self.output)
+
         def test_02_communication(self):
                 """Test that the transport for communicating with the depots is
                 actually going through the proxy. This is done by
@@ -789,9 +811,9 @@
 
                 # Find the hashes that will be included in the urls of the
                 # proxied file repos.
-                hash1 = hashlib.sha1("file://" +
+                hash1 = digest.DEFAULT_HASH_FUNC("file://" +
                     self.dcs[1].get_repodir().rstrip("/")).hexdigest()
-                hash3 = hashlib.sha1("file://" +
+                hash3 = digest.DEFAULT_HASH_FUNC("file://" +
                     self.dcs[3].get_repodir().rstrip("/")).hexdigest()
 
                 # Check that a user can add and remove mirrors,
@@ -926,6 +948,7 @@
 
                 expected = """\
 bar (test3) 1.0-0 ---
+baz (test3) 1.0-0 ---
 example_pkg 1.0-0 ---
 """
                 self.__check_package_lists(expected)
@@ -953,6 +976,7 @@
 
                 expected = """\
 bar (test3) 1.0-0 ---
+baz (test3) 1.0-0 ---
 example_pkg 1.0-0 ---
 """
                 self.__check_package_lists(expected)
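
The hunks on either side of this point replace hard-coded hashlib.sha1 calls
with digest.DEFAULT_HASH_FUNC when deriving the prefix under which a file
repository is proxied. A small sketch of the same computation, with sha1
standing in for whatever DEFAULT_HASH_FUNC currently is:

    import hashlib

    DEFAULT_HASH_FUNC = hashlib.sha1    # stand-in; pkg.digest decides this

    def proxied_repo_hash(repodir):
            """Hash embedded in the system repository's proxied file URLs."""
            return DEFAULT_HASH_FUNC(
                "file://" + repodir.rstrip("/")).hexdigest()

Centralizing the choice in pkg.digest means these tests keep passing if the
default hash is ever bumped.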
@@ -1281,11 +1305,11 @@
 
                 # Find the hashes that will be included in the urls of the
                 # proxied file repos.
-                hash1 = hashlib.sha1("file://" +
+                hash1 = digest.DEFAULT_HASH_FUNC("file://" +
                     self.dcs[1].get_repodir().rstrip("/")).hexdigest()
-                hash2 = hashlib.sha1("file://" +
+                hash2 = digest.DEFAULT_HASH_FUNC("file://" +
                     self.dcs[2].get_repodir().rstrip("/")).hexdigest()
-                hash3 = hashlib.sha1("file://" +
+                hash3 = digest.DEFAULT_HASH_FUNC("file://" +
                     self.dcs[3].get_repodir().rstrip("/")).hexdigest()
 
                 expected = """\
@@ -1339,11 +1363,11 @@
 
                 # Find the hashes that will be included in the urls of the
                 # proxied file repos.
-                hash1 = hashlib.sha1("file://" +
+                hash1 = digest.DEFAULT_HASH_FUNC("file://" +
                     self.dcs[1].get_repodir().rstrip("/")).hexdigest()
-                hash2 = hashlib.sha1("file://" +
+                hash2 = digest.DEFAULT_HASH_FUNC("file://" +
                     self.dcs[2].get_repodir().rstrip("/")).hexdigest()
-                hash3 = hashlib.sha1("file://" +
+                hash3 = digest.DEFAULT_HASH_FUNC("file://" +
                     self.dcs[3].get_repodir().rstrip("/")).hexdigest()
 
                 self.__set_responses("all-access-f")
@@ -1379,11 +1403,11 @@
 
                 # Find the hashes that will be included in the urls of the
                 # proxied file repos.
-                hash1 = hashlib.sha1("file://" +
+                hash1 = digest.DEFAULT_HASH_FUNC("file://" +
                     self.dcs[1].get_repodir().rstrip("/")).hexdigest()
-                hash2 = hashlib.sha1("file://" +
+                hash2 = digest.DEFAULT_HASH_FUNC("file://" +
                     self.dcs[2].get_repodir().rstrip("/")).hexdigest()
-                hash3 = hashlib.sha1("file://" +
+                hash3 = digest.DEFAULT_HASH_FUNC("file://" +
                     self.dcs[3].get_repodir().rstrip("/")).hexdigest()
 
                 expected = """\
--- a/src/tests/cli/t_pkgdep.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkgdep.py	Fri Oct 11 16:09:34 2013 +0100
@@ -437,8 +437,9 @@
 
 Subcommands:
         pkgdepend generate [-IMm] -d dir [-d dir] [-D name=value] [-k path]
-            manifest_path
-        pkgdepend [options] resolve [-dmosv] manifest ...
+            manifest_file
+        pkgdepend resolve [-EmoSv] [-d output_dir]
+            [-e external_package_file]... [-s suffix] manifest_file ...
 
 Options:
         -R dir
--- a/src/tests/cli/t_pkgrecv.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkgrecv.py	Fri Oct 11 16:09:34 2013 +0100
@@ -48,6 +48,8 @@
 import unittest
 import zlib
 
+from pkg.digest import DEFAULT_HASH_FUNC
+
 class TestPkgrecvMulti(pkg5unittest.ManyDepotTestCase):
         # Cleanup after every test.
         persistent_setup = False
@@ -241,7 +243,8 @@
                                 # Since the file shouldn't be compressed, this
                                 # should return a zlib.error.
                                 self.assertRaises(zlib.error,
-                                    misc.gunzip_from_stream, ifile, ofile)
+                                    misc.gunzip_from_stream, ifile, ofile,
+                                    ignore_hash=True)
 
                 # Next, send it to another depot
                 self.pkgsend(self.durl2, "open [email protected]")
@@ -266,8 +269,9 @@
                 old = orepo.manifest(f)
                 new = os.path.join(self.tempdir, f.get_dir_path(), "manifest")
 
-                self.assertEqual(misc.get_data_digest(old),
-                    misc.get_data_digest(new))
+                self.assertEqual(
+                    misc.get_data_digest(old, hash_func=DEFAULT_HASH_FUNC),
+                    misc.get_data_digest(new, hash_func=DEFAULT_HASH_FUNC))
 
                 # Next, load the manifest.
                 m = manifest.Manifest()
@@ -284,8 +288,10 @@
                                 new = os.path.join(self.tempdir,
                                     f.get_dir_path(), a.hash)
                                 self.assertNotEqual(old, new)
-                                self.assertEqual(misc.get_data_digest(old),
-                                    misc.get_data_digest(new))
+                                self.assertEqual(misc.get_data_digest(old,
+                                    hash_func=DEFAULT_HASH_FUNC),
+                                    misc.get_data_digest(new,
+                                    hash_func=DEFAULT_HASH_FUNC))
 
                 # Second, pkgrecv to the pkg to a file repository.
                 npath = tempfile.mkdtemp(dir=self.test_root)
@@ -301,8 +307,9 @@
 
                 self.debug(old)
                 self.debug(new)
-                self.assertEqual(misc.get_data_digest(old),
-                    misc.get_data_digest(new))
+                self.assertEqual(
+                    misc.get_data_digest(old, hash_func=DEFAULT_HASH_FUNC),
+                    misc.get_data_digest(new, hash_func=DEFAULT_HASH_FUNC))
 
                 # Next, load the manifest.
                 m = manifest.Manifest()
@@ -318,8 +325,10 @@
                                 old = orepo.file(a.hash)
                                 new = nrepo.file(a.hash)
                                 self.assertNotEqual(old, new)
-                                self.assertEqual(misc.get_data_digest(old),
-                                    misc.get_data_digest(new))
+                                self.assertEqual(misc.get_data_digest(old,
+                                    hash_func=DEFAULT_HASH_FUNC),
+                                    misc.get_data_digest(new,
+                                    hash_func=DEFAULT_HASH_FUNC))
 
                 # Third, pkgrecv to the pkg to a http repository from the
                 # file repository from the last test.
@@ -332,8 +341,9 @@
                 old = orepo.manifest(f)
                 new = nrepo.manifest(f)
 
-                self.assertEqual(misc.get_data_digest(old),
-                    misc.get_data_digest(new))
+                self.assertEqual(
+                    misc.get_data_digest(old, hash_func=DEFAULT_HASH_FUNC),
+                    misc.get_data_digest(new, hash_func=DEFAULT_HASH_FUNC))
 
                 # Next, load the manifest.
                 m = manifest.Manifest()
@@ -349,8 +359,11 @@
                                 old = orepo.file(a.hash)
                                 new = nrepo.file(a.hash)
                                 self.assertNotEqual(old, new)
-                                self.assertEqual(misc.get_data_digest(old),
-                                    misc.get_data_digest(new))
+                                self.assertEqual(
+                                    misc.get_data_digest(old,
+                                    hash_func=DEFAULT_HASH_FUNC),
+                                    misc.get_data_digest(new,
+                                    hash_func=DEFAULT_HASH_FUNC))
 
                 # Fourth, create an image and verify that the sent package is
                 # seen by the client.
@@ -373,8 +386,9 @@
                 old = orepo.manifest(f)
                 new = nrepo.manifest(f)
 
-                self.assertEqual(misc.get_data_digest(old),
-                    misc.get_data_digest(new))
+                self.assertEqual(
+                    misc.get_data_digest(old, hash_func=DEFAULT_HASH_FUNC),
+                    misc.get_data_digest(new, hash_func=DEFAULT_HASH_FUNC))
 
         def test_3_recursive(self):
                 """Verify that retrieving a package recursively will retrieve
@@ -540,7 +554,8 @@
                                 # Since the file shouldn't be compressed, this
                                 # should return a zlib.error.
                                 self.assertRaises(zlib.error,
-                                    misc.gunzip_from_stream, ifile, ofile)
+                                    misc.gunzip_from_stream, ifile, ofile,
+                                    ignore_hash=True)
 
                 for var in ("PKG_SRC", "PKG_DEST"):
                         del os.environ[var]
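
Every digest comparison in this file now names its hash function explicitly
via misc.get_data_digest(..., hash_func=DEFAULT_HASH_FUNC). A rough,
self-contained stand-in for the behavior these assertions rely on (streaming
the file through the given hashlib-style constructor and returning the hex
digest first):

    def data_digest(path, hash_func, bufsz=64 * 1024):
            """Sketch of misc.get_data_digest(path, hash_func=...)."""
            h = hash_func()
            with open(path, "rb") as f:
                    chunk = f.read(bufsz)
                    while chunk:
                            h.update(chunk)
                            chunk = f.read(bufsz)
            return h.hexdigest(), None

Passing hash_func explicitly keeps these equality checks meaningful however
many hash attributes a manifest carries.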
@@ -843,7 +858,7 @@
                 # Test basic operation of cloning repo which contains one
                 # publisher to repo which contains same publisher
                 self.pkgrecv(self.durl1, "--clone -d %s" % self.dpath2)
-                
+
                 ret = subprocess.call(["/usr/bin/gdiff", "-Naur", "-x", 
                     "index", "-x", "trans", self.dpath1, self.dpath2])
                 self.assertTrue(ret==0)
@@ -880,7 +895,7 @@
                 # Test that clone fails if --raw is specified.
                 self.pkgrecv(self.durl1, "--raw --clone -d %s -p test2" %
                     self.dpath2, exit=2)
-                
+
                 # Test that clone fails if -c is specified.
                 self.pkgrecv(self.durl1, "-c /tmp/ --clone -d %s -p test2" %
                     self.dpath2, exit=2)
@@ -893,6 +908,54 @@
                 self.pkgrecv(self.durl1, "--newest --clone -d %s -p test2" %
                     self.dpath2, exit=2)
 
+        def test_12_multihash(self):
+                """Tests that we can recv to and from repositories with
+                multi-hash support, interoperating with repositories without
+                multi-hash support."""
+
+                f = fmri.PkgFmri(self.published[3], None)
+
+                # We create an image simply so we can use "contents -g" to
+                # inspect the repository.
+                self.image_create()
+
+                # First, recv the package and verify it has no extended hashes
+                self.pkgrecv(self.durl1, "-d %s %s" % (self.durl3, f))
+                self.pkg("contents -g %s -m %s" % (self.durl3, f))
+                self.assert_("pkg.hash.sha256" not in self.output)
+
+                # Now stop and start the repository as multi-hash aware, and
+                # recv it again, making sure that we do not get multiple hashes
+                # added (because modifying the manifest would break signatures)
+                self.dcs[3].stop()
+                self.dcs[3].set_debug_feature("hash=sha1+sha256")
+                self.dcs[3].start()
+                self.pkgrecv(self.durl1, "-d %s %s" % (self.durl3, f))
+                self.pkg("contents -g %s -m %s" % (self.durl3, f))
+                self.assert_("pkg.hash.sha256" not in self.output)
+
+                # Now check the reverse - that a package with multiple hashes
+                # can be received into a repository that is not multi-hash aware
+                b = "[email protected],5.11-0"
+                self.pkgsend_bulk(self.durl3, self.bronze10)
+                self.pkg("contents -g %s -m %s" % (self.durl3, b))
+                self.assert_("pkg.hash.sha256" in self.output)
+                self.pkgrecv(self.durl3, "-d %s %s" % (self.durl4, b))
+                self.pkg("contents -g %s -m %s" % (self.durl4, b))
+                self.assert_("pkg.hash.sha256" in self.output)
+
+                # Ensure that we can recv multi-hash packages into p5p files
+                p5p_path = os.path.join(self.test_root, "multi-hash.p5p")
+                self.pkgrecv(self.durl3, "-ad %s %s" % (p5p_path, b))
+                self.pkg("contents -g %s -m %s" % (p5p_path, b))
+                self.assert_("pkg.hash.sha256" in self.output)
+
+                # Finally, stop and start our scratch repository to clear the
+                # debug feature. If this doesn't happen because we've failed
+                # before now, it's not the end of the world.
+                self.dcs[3].stop()
+                self.dcs[3].unset_debug_feature("hash=sha1+sha256")
+                self.dcs[3].start()
 
 class TestPkgrecvHTTPS(pkg5unittest.HTTPSTestClass):
 
@@ -908,7 +971,7 @@
 
                 pkg5unittest.HTTPSTestClass.setUp(self, pubs,
                     start_depots=True)
-                
+
                 self.srurl = self.dcs[1].get_repo_url()
                 self.make_misc_files(self.misc_files)
                 self.pkgsend_bulk(self.srurl, self.example_pkg10)
--- a/src/tests/cli/t_pkgrepo.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkgrepo.py	Fri Oct 11 16:09:34 2013 +0100
@@ -111,6 +111,8 @@
             close
         """
 
+        # These hashes should remain as SHA-1 until such time as we bump the
+        # least-preferred hash for actions.
         fhashes = {
              "tmp/empty": "5f5fb715934e0fa2bfb5611fd941d33228027006",
              "tmp/truck1": "c9e257b659ace6c3fbc4d334f49326b3889fd109",
@@ -1354,6 +1356,38 @@
                                 continue
                         self.assert_(not os.listdir(rstore.file_root))
 
+                # Reset the src_repo for the rest of the test.
+                shutil.rmtree(src_repo)
+                self.create_repo(src_repo)
+                self.pkgrepo("set -s %s publisher/prefix=test" % src_repo)
+
+                published = self.pkgsend_bulk(src_repo, (self.tree10),
+                    debug_hash="sha1+sha256")
+
+                # Verify that we only have SHA-1 hashes in the rstore
+                repo = self.get_repo(src_repo)
+                known_hashes = self.fhashes.values()
+                for rstore in repo.rstores:
+                        if not rstore.publisher:
+                                continue
+                        for dir, dnames, fnames in os.walk(rstore.file_root):
+                                for f in fnames:
+                                        if f not in known_hashes:
+                                                self.assert_(False,
+                                                    "Unexpected content in "
+                                                    "repodir: %s" % f)
+
+                # Verify that when a repository has been published with multiple
+                # hashes, on removal, we only attempt to remove files using the
+                # least-preferred hash.
+                self.pkgrepo("remove -s %s tree" % src_repo)
+
+                # Verify repository file_root is empty.
+                for rstore in repo.rstores:
+                        if not rstore.publisher:
+                                continue
+                        self.assert_(not os.listdir(rstore.file_root))
+
                 # Cleanup.
                 shutil.rmtree(src_repo)
                 shutil.rmtree(dest_repo)
@@ -1804,8 +1838,8 @@
                 self.assert_("etc/truck1" in self.output)
                 self.assert_("etc/trailer" in self.output)
 
-                # finally, corrupt another file to see that we can also spot
-                # files that aren't gzipped.
+                # Corrupt another file to see that we can also spot files that
+                # aren't gzipped.
                 fmris += self.pkgsend_bulk(repo_path, (self.truck20))
                 bad_gzip_path = self.__inject_badhash("tmp/truck2",
                     valid_gzip=False)
@@ -1818,6 +1852,37 @@
                     self.output.count("ERROR: Corrupted gzip file") == 1)
                 self.assert_(bad_gzip_path in self.output)
 
+                # Check that when verifying content, we always use the
+                # most-preferred hash. Remove all existing packages first.
+                self.pkgrepo("-s %s remove %s" % (repo_path, " ".join(fmris)))
+                fmris = self.pkgsend_bulk(repo_path, (self.tree10),
+                    debug_hash="sha1+sha256")
+                self.pkgrepo("-s %s verify" % repo_path, exit=0)
+
+                # Break a file in the repository and ensure we spot it.
+                bad_hash_path = self.__inject_badhash("tmp/truck1")
+                bad_basename = os.path.basename(bad_hash_path)
+
+                self.pkgrepo("-s %s verify" % repo_path, exit=1)
+                self.assert_(
+                    self.output.count("ERROR: Invalid file hash") == 1)
+
+                # We should be verifying using the SHA-2 hash, and so we should
+                # only see the SHA-1 value in the output once, when printing
+                # the path to the file in the repository, not when reporting
+                # the computed or expected hash.
+                self.assert_(self.output.count(bad_basename) == 1)
+
+                # Verify that when we publish using SHA-1 only, we get
+                # the SHA-1 value printed twice: once when printing the path
+                # to the file in the repository, and once when printing the
+                # expected hash.
+                self.pkgrepo("-s %s remove %s" % (repo_path, " ".join(fmris)))
+                fmris = self.pkgsend_bulk(repo_path, (self.tree10))
+                self.__inject_badhash("tmp/truck1")
+
+                self.pkgrepo("-s %s verify" % repo_path, exit=1)
+                self.assert_(self.output.count(bad_basename) == 2)
 
         def test_12_verify_badmanifest(self):
                 """Test that verify finds bad manifests."""
@@ -2160,6 +2225,18 @@
                 self.pkgrepo("-s %s fix -p missing" % repo_path, exit=1)
                 self.assert_("no matching publishers" in self.errout)
 
+        def test_24_invalid_repo(self):
+                """Test that trying to open an invalid repository is handled
+                correctly"""
+
+                tmpdir = tempfile.mkdtemp(dir=self.test_root)
+
+                with open(os.path.join(tmpdir, "pkg5.image"), "w") as f:
+                        f.write("[image]\nversion = 2")
+
+                self.assertRaises(sr.RepositoryInvalidError, sr.Repository,
+                    root=tmpdir)
+
 
 class TestPkgrepoHTTPS(pkg5unittest.HTTPSTestClass):
 
--- a/src/tests/cli/t_pkgsend.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkgsend.py	Fri Oct 11 16:09:34 2013 +0100
@@ -39,6 +39,7 @@
 
 from pkg import misc
 from pkg.actions import fromstr
+from pkg.digest import DEFAULT_HASH_FUNC
 import pkg.portable as portable
 
 
@@ -689,10 +690,11 @@
                                 f = file(fpath, "wb")
                                 f.write("test" + entry)
                                 f.close()
-                                # compute a digest of the file we just created, which
-                                # we can use when validating later.
+                                # compute a digest of the file we just created,
+                                # which we can use when validating later.
                                 contents_dict[entry][4] = \
-                                    misc.get_data_digest(fpath)[0]
+                                    misc.get_data_digest(fpath,
+                                    hash_func=DEFAULT_HASH_FUNC)[0]
 
                         elif ftype == "d":
                                 try:
@@ -845,9 +847,16 @@
                                 continue
 
                         if digest:
-                                pkg5_digest, contents = misc.get_data_digest(name, return_content=True)
+                                # the hash_func used here just needs to
+                                # correspond with the one used when creating
+                                # the svr4 package - it does not consult the
+                                # pkg(5) hash or chash attributes.
+                                pkg5_digest, contents = misc.get_data_digest(
+                                    name, return_content=True,
+                                    hash_func=DEFAULT_HASH_FUNC)
                                 self.assertEqual(digest, pkg5_digest,
-                                    "%s: %s != %s, '%s'" % (name, digest, pkg5_digest, contents))
+                                    "%s: %s != %s, '%s'" % (name, digest,
+                                    pkg5_digest, contents))
 
                         st = os.stat(os.path.join(self.img_path(), name))
                         if mode is not None:
@@ -1280,6 +1289,39 @@
                     add license license=copyright
                     close""", exit=1)
 
+        def test_26_pkgsend_multihash(self):
+                """Tests that when publishing packages with mutiple hashes,
+                we only overwrite those hashes if we're in multi-hash mode
+                and only if they match the hash attributes we know how to
+                compute, other attributes are left alone."""
+
+                # We use a file:// URI rather than the depot URI so we don't
+                # have to worry about starting the depot in SHA-2 mode. Other
+                # tests in the test suite ensure SHA-2 publication is working
+                # over HTTP.
+                furi = self.dc.get_repo_url()
+                mfpath = os.path.join(self.test_root, "pkgsend_multihash.mf")
+                payload = self.make_misc_files(["pkgsend_multihash"])[0]
+
+                with open(mfpath, "wb") as mf:
+                        mf.write("""
+set name=pkg.fmri value=pkg:/[email protected]
+file %s path=/foo owner=root group=sys mode=0644 pkg.hash.sha256=spaghetti \
+    pkg.hash.rot13=caesar
+""" % payload)
+                self.pkgsend("", "-s %s publish %s" % (furi, mfpath))
+                self.image_create(furi)
+                self.pkg("contents -rm multihash")
+                self.assert_("pkg.hash.sha256=spaghetti" in self.output)
+
+                self.pkgsend("", "-s %s publish %s" % (furi, mfpath),
+                    debug_hash="sha1+sha256")
+                self.pkg("refresh")
+
+                self.pkg("contents -rm multihash")
+                self.assert_("pkg.hash.sha256=spaghetti" not in self.output)
+                self.assert_("pkg.hash.rot13=caesar" in self.output)
+
 
 class TestPkgsendHardlinks(pkg5unittest.CliTestCase):
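
test_26_pkgsend_multihash pins down the rule for pre-existing hash attributes
at publication: recompute only the attributes the current -D hash= mode knows
how to produce, and pass everything else (the pkg.hash.rot13 value here)
through untouched. A sketch of that rule, with the set of computable
attributes treated as an input rather than pkg.digest's real list:

    def merge_hash_attrs(existing, computed, computable):
            """Overwrite only hash attributes in 'computable'; keep the rest."""
            merged = dict(existing)
            for attr, value in computed.items():
                    if attr in computable:
                            merged[attr] = value
            return merged

With computable limited to "hash" (the sha1 default), the stale
pkg.hash.sha256=spaghetti value survives the first publish; under sha1+sha256
it is recomputed, while pkg.hash.rot13 is never touched because no mode can
compute it.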
 
--- a/src/tests/cli/t_pkgsign.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkgsign.py	Fri Oct 11 16:09:34 2013 +0100
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2010, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
 #
 
 import testutils
@@ -39,6 +39,7 @@
 import pkg.actions as action
 import pkg.actions.signature as signature
 import pkg.client.api_errors as apx
+import pkg.digest as digest
 import pkg.facet as facet
 import pkg.fmri as fmri
 import pkg.misc as misc
@@ -551,7 +552,7 @@
                             "ch5_ta1_cert.pem"),
                         "pkg": plist[0]
                     }
-                self.pkgsign(self.rurl1, sign_args)
+                self.pkgsign(self.rurl1, sign_args, debug_hash="sha1+sha256")
 
                 sign_args = "-k %(key)s -c %(cert)s %(name)s" % {
                     "name": plist[0],
@@ -565,6 +566,15 @@
                 self.pkg("set-property signature-policy verify")
                 api_obj = self.get_img_api_obj()
                 self._api_install(api_obj, ["example_pkg"])
+
+                # Make sure we've got exactly 1 signature with SHA-2 hashes
+                self.pkg("contents -m")
+                self.assert_(self.output.count("pkg.chain.sha256") == 1)
+                self.assert_(self.output.count("pkg.chain.chashes") == 1)
+                # and SHA1 hashes on both signatures
+                self.assert_(self.output.count("chain=") == 2)
+                self.assert_(self.output.count("chain.chashes=") == 2)
+
                 self._api_uninstall(api_obj, ["example_pkg"])
                 self.pkg("set-property signature-policy require-signatures")
                 api_obj = self.get_img_api_obj()
@@ -969,7 +979,7 @@
                 self.write_img_manifest(pfmri, s)
 
                 DebugValues["manifest_validate"] = "Never"
-                
+
                 self.pkg("set-property signature-policy verify")
                 # This should fail because the text of manifest has changed
                 # so the hash should no longer validate.
@@ -2372,6 +2382,11 @@
                 # signature actions in it.
                 self.pkgsign_simple(self.rurl1, plist[0], exit=1)
 
+                # The addition of SHA-256 hashes should still result in us
+                # believing the signatures are identical
+                self.pkgsign_simple(self.rurl1, plist[0], exit=1,
+                    debug_hash="sha1+sha256")
+
                 self.pkg_image_create(self.rurl1)
                 self.seed_ta_dir("ta3")
                 self.pkg("set-property signature-policy verify")
@@ -2943,7 +2958,12 @@
                 fd, new_cert = tempfile.mkstemp(dir=self.test_root)
                 with os.fdopen(fd, "wb") as fh:
                         fh.write(cert.as_pem())
-                file_name = misc.get_data_digest(new_cert)[0]
+
+                # The file-store uses the least-preferred hash when storing
+                # content.
+                alg = digest.HASH_ALGS[digest.REVERSE_RANKED_HASH_ATTRS[0]]
+                file_name = misc.get_data_digest(new_cert,
+                    hash_func=alg)[0]
                 subdir = os.path.join(cache_dir, file_name[:2])
                 os.mkdir(subdir)
                 fp = os.path.join(subdir, file_name)
@@ -2986,13 +3006,16 @@
                 fd, new_cert = tempfile.mkstemp(dir=self.test_root)
                 with os.fdopen(fd, "wb") as fh:
                         fh.write(cert.as_pem())
-                file_name = misc.get_data_digest(new_cert)[0]
-                subdir = os.path.join(cache_dir, file_name[:2])
-                os.mkdir(subdir)
-                fp = os.path.join(subdir, file_name)
-                fh = PkgGzipFile(fp, "wb")
-                fh.write(cert.as_pem())
-                fh.close()
+                for attr in digest.DEFAULT_HASH_ATTRS:
+                        alg = digest.HASH_ALGS[attr]
+                        file_name = misc.get_data_digest(new_cert,
+                            hash_func=alg)[0]
+                        subdir = os.path.join(cache_dir, file_name[:2])
+                        os.mkdir(subdir)
+                        fp = os.path.join(subdir, file_name)
+                        fh = PkgGzipFile(fp, "wb")
+                        fh.write(cert.as_pem())
+                        fh.close()
 
                 self.pkgrecv(self.rurl2, "-c %s -d %s '*'" %
                     (cache_dir, self.rurl1))
--- a/src/tests/cli/t_pkgsurf.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_pkgsurf.py	Fri Oct 11 16:09:34 2013 +0100
@@ -30,23 +30,14 @@
 import pkg5unittest
 
 import os
-import pkg.catalog as catalog
-import pkg.config as cfg
-import pkg.client.pkgdefs as pkgdefs
+import pkg.digest as digest
 import pkg.fmri as fmri
 import pkg.manifest as manifest
 import pkg.misc as misc
-import pkg.p5p as p5p
-import pkg.portable as portable
-import pkg.server.repository as repo
 import shutil
 import subprocess
 import tempfile
-import time
-import urllib
-import urlparse
 import unittest
-import zlib
 
 class TestPkgsurf(pkg5unittest.ManyDepotTestCase):
         # Cleanup after every test.
@@ -57,7 +48,7 @@
         # Since we publish the expected package to an additional repo, we have
         # to set the timestamps to make sure the target and expected packages
         # are equal.
-        
+
         # The test cases are mainly in the different types of packages we
         # have in the repo.
 
@@ -122,7 +113,7 @@
 
         hammerhead_exp = hammerhead_targ
 
-        # Package has only dep change but dependency package changed, 
+        # Package has only dep change but dependency package changed,
         # should not be reversioned.
         blue_ref = """
             open [email protected],5.11-0:20000101T000000Z
@@ -152,7 +143,7 @@
         """
 
         bull_exp = bull_targ
-        
+
         # Package has only dep change and dependency package didn't change,
         # should be reversioned.
         mako_ref = """
@@ -188,7 +179,7 @@
         # dependencies should be fixed.
         # Pkg has all sorts of actions to make sure everything gets moved
         # correctly.
-        
+
         angel_ref = """
             open [email protected],5.11-0:20000101T000000Z
             add file tmp/bat mode=0444 owner=root group=bin path=/etc/angel
@@ -236,7 +227,7 @@
 
         # Package has content change and depends on package which didn't get
         # reversioned, shouldn't be touched.
-        
+
         horn_ref = """
             open [email protected],5.11-0:20000101T000000Z
             add file tmp/bat mode=0444 owner=root group=bin path=/etc/horn
@@ -254,9 +245,9 @@
         horn_exp = horn_targ
 
 
-        # Package has content change but has require-any dep on package which 
+        # Package has content change but has require-any dep on package which
         # got reversioned, dependencies should be fixed.
-        
+
         lemon_ref = """
             open [email protected],5.11-0:20000101T000000Z
             add file tmp/bat mode=0444 owner=root group=bin path=/etc/lemon
@@ -283,7 +274,7 @@
         # version. The version of the pkg in the ref repo should be substituted
         # for tiger but not for sandtiger (since dep pkg is still successor of
         # dep FMRI).
-        
+
         leopard_ref = """
             open [email protected],5.11-0:20000101T000000Z
             add file tmp/bat mode=0444 owner=root group=bin path=/etc/leopard
@@ -347,18 +338,18 @@
         # Package has no content change but a change in an attribute,
         # should be treated as content change by default but reversioned if
         # proper CLI options are given (goblin_exp is just for the default
-        # behavior, gets modified in actual test case) 
+        # behavior, gets modified in actual test case)
 
         goblin_ref = """
             open [email protected],5.11-0:20000101T000000Z
-            add set name=info.home value="deep sea" 
+            add set name=info.home value="deep sea"
             add file tmp/bat mode=0444 owner=root group=bin path=/etc/goblin
             close
         """
 
         goblin_targ = """
             open [email protected],5.11-0:20000101T000000Z
-            add set name=info.home value="deeper sea" 
+            add set name=info.home value="deeper sea"
             add file tmp/bat mode=0444 owner=root group=bin path=/etc/goblin
             close
         """
@@ -422,7 +413,7 @@
             close
         """
 
-        sleeper_exp = sleeper_ref    
+        sleeper_exp = sleeper_ref
 
 
         # Check for correct handling of Varcets. Pkg contains same dep FMRI stem
@@ -456,7 +447,7 @@
         """
 
         # Pkg in ref repo is newer than the one in target.
-        # Should not be reversioned. 
+        # Should not be reversioned.
         thresher_ref = """
             open [email protected],5.11-0:20000101T000000Z
             close
@@ -470,7 +461,7 @@
         thresher_exp = thresher_targ
 
         # Package only found in target, not in ref.
-        # Package has a dep on a reversioned pkg, but the reversioned pkg is 
+        # Package has a dep on a reversioned pkg, but the reversioned pkg is
         # still a successor of the dep FMRI.
         # The dep should not be changed.
         bamboo_targ = """
@@ -480,7 +471,7 @@
         """
 
         bamboo_exp = bamboo_targ
-        
+
 
         # Create some packages for an additional publisher
         humpback_targ = """
@@ -488,10 +479,10 @@
             close
         """
 
-        humpback_ref = """                                                     
-            open pkg://cetacea/[email protected],5.11-0:20000101T000000Z             
-            close                                                               
-        """                                                                     
+        humpback_ref = """
+            open pkg://cetacea/[email protected],5.11-0:20000101T000000Z
+            close
+        """
 
         humpback_exp = humpback_targ
 
@@ -522,7 +513,7 @@
                                 pass
                         self.targ_pkgs.append(getattr(self, targ))
                         self.exp_pkgs.append(getattr(self, exp))
-                        
+
                 pkg5unittest.ManyDepotTestCase.setUp(self, ["selachii",
                     "selachii", "selachii", "selachii"], start_depots=True)
 
@@ -543,9 +534,9 @@
                 self.published_exp = self.pkgsend_bulk(self.dpath3,
                     self.exp_pkgs)
 
-                # keep a tmp repo to copy the target into for each new test 
+                # keep a tmp repo to copy the target into for each new test
                 self.dpath_tmp = self.dcs[4].get_repodir()
-                
+
         def test_0_options(self):
                 """Check for correct input handling."""
                 self.pkgsurf("-x", exit=2)
@@ -584,7 +575,7 @@
                 self.pkgsurf("-s %s -r %s" % (tempdir, self.dpath1), exit=1)
                 self.pkgsurf("-s %s -r %s" % (self.dpath1, tempdir), exit=1)
 
-                # Repo empty 
+                # Repo empty
                 self.pkgrepo("create -s %s" % tempdir)
                 self.pkgsurf("-s %s -r %s" % (tempdir, self.dpath1), exit=1)
                 self.pkgsurf("-s %s -r %s" % (self.dpath1, tempdir), exit=1)
@@ -595,7 +586,7 @@
                 self.assertTrue("No packages to reversion." in self.output)
                 self.pkgsurf("-s %s -r %s" % (self.dpath1, tempdir))
                 self.assertTrue("No packages to reversion." in self.output)
-                shutil.rmtree(tempdir)             
+                shutil.rmtree(tempdir)
 
                 # Now check if it actually works.
                 self.pkgsurf("-s %s -r %s" % (self.dpath_tmp, self.dpath1))
@@ -632,7 +623,7 @@
                 # Just run again and see if goblin pkg now gets reversioned.
                 self.pkgsurf("-s %s -r %s -i info.home" % (self.dpath_tmp,
                     self.dpath1))
-                
+
                 # Find goblin package
                 for s in self.published_ref:
                         if "goblin" in s:
@@ -640,8 +631,10 @@
                 f = fmri.PkgFmri(s, None)
                 targ = targ_repo.manifest(f)
                 ref = ref_repo.manifest(f)
-                self.assertEqual(misc.get_data_digest(targ),
-                    misc.get_data_digest(ref))
+                self.assertEqual(misc.get_data_digest(targ,
+                    hash_func=digest.DEFAULT_HASH_FUNC),
+                    misc.get_data_digest(ref,
+                    hash_func=digest.DEFAULT_HASH_FUNC))
 
                 # Check that running the tool again doesn't find any pkgs
                 # to reversion. Use http for accessing reference repo this time.
--- a/src/tests/cli/t_sysrepo.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/cli/t_sysrepo.py	Fri Oct 11 16:09:34 2013 +0100
@@ -49,6 +49,8 @@
 import pkg.misc as misc
 import pkg.portable as portable
 
+from pkg.digest import DEFAULT_HASH_FUNC
+
 SYSREPO_USER = "pkg5srv"
 
 class TestBasicSysrepoCli(pkg5unittest.ApacheDepotTestCase):
@@ -849,8 +851,10 @@
                 os.rename(repo_dir, repo_dir + ".new")
                 try:
                         self.sysrepo("", stderr=True)
-                        self.assert_(misc.get_data_digest(sysrepo_conf)[0] ==
-                            misc.get_data_digest(saved_sysrepo_conf)[0],
+                        self.assert_(misc.get_data_digest(sysrepo_conf,
+                            hash_func=DEFAULT_HASH_FUNC)[0] ==
+                            misc.get_data_digest(saved_sysrepo_conf,
+                            hash_func=DEFAULT_HASH_FUNC)[0],
                             "system repository configuration changed "
                             "unexpectedly.")
                 finally:
--- a/src/tests/pkg5unittest.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/tests/pkg5unittest.py	Fri Oct 11 16:09:34 2013 +0100
@@ -2447,8 +2447,14 @@
                     su_wrap=su_wrap, env_arg=env_arg)
 
         def pkgrepo(self, command, comment="", exit=0, su_wrap=False,
-            env_arg=None, stderr=False, out=False):
-                cmdline = "%s/usr/bin/pkgrepo %s" % (g_proto_area, command)
+            env_arg=None, stderr=False, out=False, debug_hash=None):
+                if debug_hash:
+                        debug_arg = "-D hash=%s " % debug_hash
+                else:
+                        debug_arg = ""
+
+                cmdline = "%s/usr/bin/pkgrepo %s%s" % (g_proto_area, debug_arg,
+                    command)
                 return self.cmdline_run(cmdline, comment=comment, exit=exit,
                     su_wrap=su_wrap, env_arg=env_arg, out=out, stderr=stderr)
 
@@ -2459,11 +2465,14 @@
                     su_wrap=su_wrap, env_arg=env_arg, out=out, stderr=stderr)
 
         def pkgsign(self, depot_url, command, exit=0, comment="",
-            env_arg=None):
+            env_arg=None, debug_hash=None):
                 args = []
                 if depot_url:
                         args.append("-s %s" % depot_url)
 
+                if debug_hash:
+                        args.append("-D hash=%s" % debug_hash)
+
                 if command:
                         args.append(command)
 
@@ -2472,7 +2481,8 @@
                 return self.cmdline_run(cmdline, comment=comment, exit=exit,
                     env_arg=env_arg)
 
-        def pkgsign_simple(self, depot_url, pkg_name, exit=0, env_arg=None):
+        def pkgsign_simple(self, depot_url, pkg_name, exit=0, env_arg=None,
+            debug_hash=None):
                 chain_cert_path = os.path.join(self.chain_certs_dir,
                     "ch1_ta3_cert.pem")
                 sign_args = "-k %(key)s -c %(cert)s -i %(ch1)s %(name)s" % {
@@ -2482,16 +2492,23 @@
                     "ch1": chain_cert_path,
                 }
                 return self.pkgsign(depot_url, sign_args, exit=exit,
-                    env_arg=env_arg)
+                    env_arg=env_arg, debug_hash=debug_hash)
 
         def pkgsend(self, depot_url="", command="", exit=0, comment="",
-            allow_timestamp=False, env_arg=None, su_wrap=False):
+            allow_timestamp=False, env_arg=None, su_wrap=False,
+            debug_hash=None):
                 args = []
                 if allow_timestamp:
                         args.append("-D allow-timestamp")
                 if depot_url:
                         args.append("-s " + depot_url)
 
+                # debug_hash lets us choose the type of hash attributes that
+                # should be added to this package on publication. Valid values
+                # are: sha1, sha1+sha256, sha256
+                if debug_hash:
+                        args.append("-D hash=%s" % debug_hash)
+
                 if command:
                         args.append(command)
 
@@ -2536,7 +2553,8 @@
                 return retcode, published
 
         def pkgsend_bulk(self, depot_url, commands, exit=0, comment="",
-            no_catalog=False, refresh_index=False, su_wrap=False):
+            no_catalog=False, refresh_index=False, su_wrap=False,
+            debug_hash=None):
                 """ Send a series of packaging commands; useful  for quickly
                     doing a bulk-load of stuff into the repo.  All commands are
                     expected to work; if not, the transaction is abandoned.  If
@@ -2603,7 +2621,8 @@
                                                 retcode, published = \
                                                     self.pkgsend(depot_url, cmd,
                                                     allow_timestamp=True,
-                                                    su_wrap=su_wrap)
+                                                    su_wrap=su_wrap,
+                                                    debug_hash=debug_hash)
                                                 if retcode == 0 and published:
                                                         plist.append(published)
                                         except:
@@ -2621,7 +2640,8 @@
 
                         if exit == 0 and refresh_index:
                                 self.pkgrepo("-s %s refresh --no-catalog" %
-                                    depot_url, su_wrap=su_wrap)
+                                    depot_url, su_wrap=su_wrap,
+                                    debug_hash=debug_hash)
                 except UnexpectedExitCodeException, e:
                         if e.exitcode != exit:
                                 raise
@@ -4297,7 +4317,10 @@
 
         def _network_ping(self):
                 try:
-                        urllib2.urlopen(self.url)
+                        # Ping the versions URL, rather than the default /
+                        # so that we don't initialize the BUI code yet.
+                        urllib2.urlopen(urlparse.urljoin(self.url,
+                            "versions/0"))
                 except urllib2.HTTPError, e:
                         if e.code == httplib.FORBIDDEN:
                                 return True
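
With this plumbing in place, a test opts into multi-hash behavior by passing
debug_hash through the existing helpers, which inject -D hash=... ahead of
the subcommand. Usage as it appears in the hunks above (names as used there):

    # From t_pkg_search.py: publish with both SHA-1 and SHA-256 attributes.
    self.pkgsend_bulk(self.rurl2, self.same_pub2,
        refresh_index=True, debug_hash="sha1+sha256")

    # From t_pkgsign.py: sign with extended hashes as well.
    self.pkgsign(self.rurl1, sign_args, debug_hash="sha1+sha256")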
--- a/src/util/apache2/depot/depot_index.py	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/util/apache2/depot/depot_index.py	Fri Oct 11 16:09:34 2013 +0100
@@ -22,7 +22,6 @@
 # Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
 
 import cherrypy
-import hashlib
 import httplib
 import logging
 import mako
@@ -35,6 +34,7 @@
 import urllib
 import Queue
 
+import pkg.digest as digest
 import pkg.p5i
 import pkg.server.api
 import pkg.server.repository as sr
@@ -295,15 +295,20 @@
 
                 for prefix in repo_paths:
                         path = repo_paths[prefix]
-                        repo_hash = hashlib.sha1(path).hexdigest()
+                        repo_hash = digest.DEFAULT_HASH_FUNC(path).hexdigest()
                         index_dir = os.path.sep.join(
                             [self.cache_dir, "indexes", repo_hash])
 
                         # if the index dir exists for this repository, we do not
                         # automatically attempt a refresh.
                         refresh_index = not os.path.exists(index_dir)
-                        repo = sr.Repository(root=path,
-                        read_only=True, writable_root=index_dir)
+                        try:
+                                repo = sr.Repository(root=path,
+                                    read_only=True, writable_root=index_dir)
+                        except sr.RepositoryError, e:
+                                print("Error initializing repository at %s: "
+                                    "%s" % (path, e))
+                                continue
 
                         repositories[prefix] = repo
                         dconf = sd.DepotConfig()
@@ -675,9 +680,9 @@
                             "message": httplib.responses[httplib.NOT_FOUND],
                             "traceback": "",
                             "version": cherrypy.__version__}
-                        print "Path that raised exception was %s" % \
-                            cherrypy.request.path_info
-                        print message
+                        print("Path that raised exception was %s" %
+                            cherrypy.request.path_info)
+                        print(message)
                         return error
                 else:
                         error = cherrypy._cperror._HTTPErrorTemplate % \
--- a/src/web/en/search.shtml	Wed Oct 02 09:22:55 2013 -0700
+++ b/src/web/en/search.shtml	Fri Oct 11 16:09:34 2013 +0100
@@ -111,6 +111,9 @@
                     num_to_return=(rpp + 1),
                     matching_version=mver,
                     return_latest=not sav)
+        except qp.QueryLengthExceeded, e:
+                results = None
+                query_error = str(e)
         except qp.QueryException, e:
                 results = None
                 query_error = str(e)
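
One ordering detail in this last hunk: assuming QueryLengthExceeded is a
QueryException subclass (which the position of the added clause suggests), it
has to be listed before the generic handler, since Python takes the first
matching except clause. A minimal illustration of the same pattern:

    class QueryException(Exception):
            pass

    class QueryLengthExceeded(QueryException):
            pass

    try:
            raise QueryLengthExceeded("too many terms")
    except QueryLengthExceeded, e:
            print("specific handler: %s" % e)
    except QueryException, e:
            print("generic handler: %s" % e)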