16015 pkgdepend needs python runpath hints
author Tim Foster <tim.s.foster@oracle.com>
Tue, 22 Feb 2011 10:00:49 +1300
changeset 2236 7b074b5316ec
parent 2235 1f446820dcb0
child 2237 8fde3b36f122
16015 pkgdepend needs python runpath hints 16020 pkgdepend doesn't find native modules 17477 typo in pkgdepend man page 17596 python search path not generated correctly for xml.dom.minidom 17615 pkgdepend generate needs an exclusion mechanism 17619 pkgdepend generate is broken for 64-bit binaries when passing relative run paths
src/man/pkgdepend.1.txt
src/modules/flavor/base.py
src/modules/flavor/depthlimitedmf.py
src/modules/flavor/depthlimitedmf24.py
src/modules/flavor/elf.py
src/modules/flavor/python.py
src/modules/flavor/script.py
src/modules/portable/__init__.py
src/modules/publish/dependencies.py
src/pkgdep.py
src/tests/api/t_dependencies.py
src/tests/cli/t_pkgdep.py
src/tests/cli/t_pkgdep_resolve.py
--- a/src/man/pkgdepend.1.txt	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/man/pkgdepend.1.txt	Tue Feb 22 10:00:49 2011 +1300
@@ -88,9 +88,45 @@
           For each -D, add the 'value' as a way to expand the token
           'name' in run paths for elf file dependencies.
 
-          For each -k, add the path given to the list of paths in which
-          to look for kernel modules.  Using the -k argument removes the
-          default paths which are /kernel and /usr/kernel.
+          For each -k, add the path given to the list of run paths
+          declared in which to look for kernel modules.  Using the -k
+          argument removes the default paths which are /kernel and 
+          /usr/kernel.
+
+          Run paths, such as those specified by the -k option, can also
+          be specified on per-action or per-manifest basis using the
+          action or manifest attribute "pkg.depend.runpath" with a
+          single value set to a colon-separated string of the paths
+          that should be used.
+
+          The use of -k is overridden by any pkg.depend.runpath
+          attributes set in the manifest or action.
+
+          The special token "$PKGDEPEND_RUNPATH" can be used as one
+          component of the pkg.depend.runpath attribute value in order
+          to include the standard system run path for the file
+          being analyzed.
+
+          In some cases, it may be necessary to prevent automatic
+          generation of dependencies.  An example where this may be
+          needed is if a package delivers a sample Python script that
+          imports a set of modules: those modules may not be considered
+          to be real dependencies for that package.
+
+          The action or manifest attribute "pkg.depend.bypass-generate"
+          can be used to indicate that we should not generate dependencies
+          against given files.
+
+          pkg.depend.bypass-generate values are perl5 regular expressions,
+          used to match file names.  The regular expressions are implicitly
+          anchored at the start and end of the file path,
+
+          eg.
+               pkg.depend.bypass-generate=this/that
+          matches:
+               this/that
+          but not:
+               something/this/that/the/other
 
      resolve [-mov] [-d output_dir] [-s suffix] manifest_path ...
           Transform dependencies on files into dependencies on the
@@ -147,7 +183,7 @@
      $ ls ./res foo     bar
 
      Example 4: Replace all $PLATFORM tokens in the run paths in elf
-     files with sun4v and sun4u while generating the dependences for the
+     files with sun4v and sun4u while generating the dependencies for the
      manifest written in foo whose content directory is in /.
 
      $ pkgdepend generate -D 'PLATFORM=sun4v' -D 'PLATFORM=sun4u' foo /
@@ -158,6 +194,33 @@
 
      $ pkgdepend generate -k /kmod foo /
 
+     Example 6: Append opt/python to the standard Python run path for a given
+     python script, and bypass dependency generation against all Python
+     modules called "test" for a file delivered as opt/python/foo/file.py.
+
+     We also avoid generating dependencies against any file delivered in
+     usr/lib/python2.6/vendor-packages/xdg.
+
+     $ cat manifest.py
+     set name=pkg.fmri value=pkg:/mypackage@1.0,5.11-1
+     set name=pkg.summary value="My test package"
+     dir path=opt mode=0755 group=sys owner=root
+     dir path=opt/python mode=0755 group=sys owner=root
+     dir path=opt/python/foo mode=0755 group=sys owner=root
+     file NOHASH path=opt/python/__init__.py mode=0644 group=sys owner=root
+     file NOHASH path=opt/python/foo/__init__.py mode=0644 group=sys owner=root
+     #
+     # We add runpath and bypass-generate attributes below:
+     #
+     file NOHASH path=opt/python/foo/file.py mode=0644 group=sys owner=root \
+         pkg.depend.bypass-generate=^.*/test.py.*$ \
+         pkg.depend.bypass-generate=^.*/testmodule.so$ \
+         pkg.depend.bypass-generate=^.*/test.so$ \
+         pkg.depend.bypass-generate=^usr/lib/python2.6/vendor-packages/xdg/.*$ \
+         pkg.depend.runpath=$PKGDEPEND_RUNPATH:/opt/python
+
+     $ pkgdepend generate -d proto manifest.py
+
 ENVIRONMENT VARIABLES
      PKG_IMAGE
           Specifies the directory containing the image to use for package
--- a/src/modules/flavor/base.py	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/modules/flavor/base.py	Tue Feb 22 10:00:49 2011 +1300
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 import os
@@ -29,6 +29,8 @@
 import pkg.actions.depend as depend
 import pkg.variant as variant
 
+from pkg.portable import PD_DEFAULT_RUNPATH
+
 class DependencyAnalysisError(Exception):
 
         def __unicode__(self):
@@ -49,6 +51,31 @@
         def __str__(self):
                 return _("Couldn't find %s") % self.file_path
 
+class MultipleDefaultRunpaths(DependencyAnalysisError):
+        """Exception that is raised when multiple $PGKDEPEND_RUNPATH tokens
+        are found in a pkg.depend.runpath attribute value."""
+
+        def __init__(self):
+                Exception.__init__(self)
+
+        def __str__(self):
+                return _(
+                    "More than one $PKGDEPEND_RUNPATH token was set on the "
+                    "same action in this manifest.")
+
+class InvalidDependBypassValue(DependencyAnalysisError):
+        """Exception that is raised when we encounter an incorrect
+        pkg.depend.bypass-generate attribute value."""
+
+        def __init__(self, value, error):
+                self.value = value
+                self.error = error
+                Exception.__init__(self)
+
+        def __str__(self):
+                return _(
+                    "Invalid pkg.depend.bypass-generate value %(val)s: "
+                    "%(err)s") % {"val": self.value, "err": self.error}
 
 class Dependency(depend.DependencyAction):
         """Base, abstract class to represent the dependencies a dependency
@@ -156,7 +183,12 @@
 
 class PublishingDependency(Dependency):
         """This class serves as a base for all dependencies.  It handles
-        dependencies with multiple files, multiple paths, or both."""
+        dependencies with multiple files, multiple paths, or both.
+
+        File dependencies are stored either as a list of base_names and
+        a list of run_paths, or are expanded, and stored as a list of
+        full_paths to each file that could satisfy the dependency.
+        """
 
         def __init__(self, action, base_names, run_paths, pkg_vars, proto_dir,
             kind):
@@ -180,6 +212,7 @@
                 """
 
                 self.base_names = sorted(base_names)
+                self.full_paths = []
 
                 if proto_dir is None:
                         self.run_paths = sorted(run_paths)
@@ -204,7 +237,10 @@
         def dep_key(self):
                 """Return the a value that represents the path of the
                 dependency. It must be hashable."""
-                return (tuple(self.base_names), tuple(self.run_paths))
+                if self.full_paths:
+                        return (tuple(self.full_paths))
+                else:
+                        return (tuple(self.base_names), tuple(self.run_paths))
 
         def _check_path(self, path_to_check, delivered_files):
                 """Takes a dictionary of files that are known to exist, and
@@ -255,21 +291,33 @@
                 attrs = {
                         "type":"require"
                 }
-                for bn in self.base_names:
-                        for rp in self.run_paths:
-                                path_to_check = os.path.normpath(
-                                    os.path.join(rp, bn))
+                def process_path(path_to_check):
+                        res = []
+                        # Find the potential real paths that path_to_check could
+                        # resolve to.
+                        res_pths, res_links = resolve_links(
+                            path_to_check, delivered_files, links,
+                            orig_dep_vars, attrs)
+                        for res_pth, res_pfmri, res_vc in res_pths:
+                                p = self._check_path(res_pth, delivered_files)
+                                if p:
+                                        res.append((p, res_vc))
+                        return res
 
-                                # Find the potential real paths that
-                                # path_to_check could resolve to.
-                                res_pths, res_links = resolve_links(
-                                    path_to_check, delivered_files, links,
-                                    orig_dep_vars, attrs)
-                                for res_pth, res_pfmri, res_vc in res_pths:
-                                        p = self._check_path(res_pth,
-                                            delivered_files)
-                                        if p:
-                                                res.append((p, res_vc))
+                # if this is an expanded dependency, we iterate over the list of
+                # full paths
+                if self.full_paths:
+                        for path_to_check in self.full_paths:
+                                res.extend(process_path(path_to_check))
+
+                # otherwise, it's a dependency with run_path and base_names
+                # entries
+                else:
+                        for bn in self.base_names:
+                                for rp in self.run_paths:
+                                        path_to_check = os.path.normpath(
+                                            os.path.join(rp, bn))
+                                        res.extend(process_path(path_to_check))
                 return res
 
         def resolve_internal(self, delivered_files, links, resolve_links, *args,
@@ -305,3 +353,22 @@
                         if missing_vars.is_satisfied():
                                 return None, missing_vars
                 return self.ERROR, missing_vars
+
+
+def insert_default_runpath(default_runpath, run_paths):
+        """Insert our default search path where the PD_DEFAULT_PATH token was
+        found, returning an updated list of run paths."""
+        try:
+                new_paths = run_paths
+                index = run_paths.index(PD_DEFAULT_RUNPATH)
+                if index >= 0:
+                        new_paths = run_paths[:index] + \
+                            default_runpath + run_paths[index + 1:]
+                if PD_DEFAULT_RUNPATH in new_paths:
+                        raise MultipleDefaultRunpaths()
+                return new_paths
+
+        except ValueError:
+                # no PD_DEFAULT_RUNPATH token, so we override the
+                # whole default search path
+                return run_paths
--- a/src/modules/flavor/depthlimitedmf.py	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/modules/flavor/depthlimitedmf.py	Tue Feb 22 10:00:49 2011 +1300
@@ -11,8 +11,10 @@
 
 import modulefinder
 import os
+import pkg.flavor.base as base
 import sys
 
+from pkg.portable import PD_DEFAULT_RUNPATH
 python_path = "PYTHONPATH"
 
 class ModuleInfo(object):
@@ -32,7 +34,8 @@
 
                 self.name = name
                 self.builtin = builtin
-                self.suffixes = [".py", ".pyc", ".pyo", "/__init__.py"]
+                self.suffixes = [".py", ".pyc", ".pyo", "/__init__.py", ".so",
+                    "module.so"]
                 self.dirs = sorted(dirs)
 
         def make_package(self):
@@ -62,7 +65,13 @@
 
         def __init__(self, proto_dir, *args, **kwargs):
                 """Produce a module finder that ignores PYTHONPATH and only
-                reports the direct imports of a module."""
+                reports the direct imports of a module.
+
+                run_paths as a keyword argument specifies a list of additional
+                paths to use when searching for modules."""
+
+                # ModuleFinder.__init__ doesn't expect run_paths
+                run_paths = kwargs.pop("run_paths", [])
 
                 # Check to see whether a python path has been set.
                 if python_path in os.environ:
@@ -80,7 +89,13 @@
                     if not self.startswith_path(fp, py_path)
                 ]
 
-                # Map the standard system paths into the proto area.
+                if run_paths:
+                        # add our detected runpath into the user-supplied one
+                        # (if any)
+                        new_path = base.insert_default_runpath(new_path,
+                            run_paths)
+
+                # Map the run paths into the proto area.
                 new_path = [
                     os.path.join(proto_dir, fp.lstrip("/"))
                     for fp in new_path
@@ -227,7 +242,7 @@
                                 return [ModuleInfo(name, path)]
                         else:
                                 return [q]
-                res = self.load_tail(name, q, tail)
+                res = self.load_tail(q, tail)
                 q.make_package()
                 res.append(q)
                 return res
@@ -270,24 +285,28 @@
                     path = self.path
                 return ModuleInfo(name, path)
 
-        def load_tail(self, name, q, tail):
+        def load_tail(self, q, tail):
                 """Determine where each component of a multilevel import would
                 be found on the file system."""
 
                 self.msgin(4, "load_tail", q, tail)
-                m = q
                 res = []
+                name = q.name
+                cur_parent = q
                 while tail:
                         i = tail.find('.')
                         if i < 0: i = len(tail)
                         head, tail = tail[:i], tail[i+1:]
                         new_name = "%s.%s" % (name, head)
-                        r = self.import_module(head, new_name, q)
+                        r = self.import_module(head, new_name, cur_parent)
                         res.append(r)
                         name = new_name
+                        cur_parent = r
+
                 # All but the last module found must be packages because they
                 # contained other packages.
                 for i in range(0, len(res) - 1):
                         res[i].make_package()
-                self.msgout(4, "load_tail ->", m)
+
+                self.msgout(4, "load_tail ->", q)
                 return res
--- a/src/modules/flavor/depthlimitedmf24.py	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/modules/flavor/depthlimitedmf24.py	Tue Feb 22 10:00:49 2011 +1300
@@ -26,6 +26,11 @@
 from modulefinder import LOAD_CONST, IMPORT_NAME, STORE_NAME, STORE_GLOBAL, \
     STORE_OPS
 
+# A string used as a component of the pkg.depend.runpath value as a special
+# token to determine where to insert the runpath that pkgdepend generates itself
+# (duplicated from pkg.portable.__init__ for reasons above)
+PD_DEFAULT_RUNPATH = "$PKGDEPEND_RUNPATH"
+
 python_path = "PYTHONPATH"
 
 class ModuleInfo(object):
@@ -45,7 +50,8 @@
 
                 self.name = name
                 self.builtin = builtin
-                self.suffixes = [".py", ".pyc", ".pyo", "/__init__.py"]
+                self.suffixes = [".py", ".pyc", ".pyo", "/__init__.py", ".so",
+                    "module.so"]
                 self.dirs = sorted(dirs)
 
         def make_package(self):
@@ -71,11 +77,30 @@
                 return "name:%s suffixes:%s dirs:%s" % (self.name,
                     " ".join(self.suffixes), len(self.dirs))
 
+class MultipleDefaultRunPaths(Exception):
+
+        def __unicode__(self):
+                # To workaround python issues 6108 and 2517, this provides a
+                # a standard wrapper for this class' exceptions so that they
+                # have a chance of being stringified correctly.
+                return str(self)
+
+        def __str__(self):
+                return _(
+                    "More than one $PKGDEPEND_RUNPATH token was set on the "
+                    "same action in this manifest.")
+
 class DepthLimitedModuleFinder(modulefinder.ModuleFinder):
 
         def __init__(self, proto_dir, *args, **kwargs):
                 """Produce a module finder that ignores PYTHONPATH and only
-                reports the direct imports of a module."""
+                reports the direct imports of a module.
+
+                run_paths as a keyword argument specifies a list of additional
+                paths to use when searching for modules."""
+
+                # ModuleFinder.__init__ doesn't expect run_paths
+                run_paths = kwargs.pop("run_paths", [])
 
                 # Check to see whether a python path has been set.
                 if python_path in os.environ:
@@ -93,6 +118,22 @@
                     if not self.startswith_path(fp, py_path)
                 ]
 
+                if run_paths:
+                        # insert our default search path where the
+                        # PD_DEFAULT_RUNPATH token was found
+                        try:
+                                index = run_paths.index(PD_DEFAULT_RUNPATH)
+                                if index >= 0:
+                                        run_paths = run_paths[:index] + \
+                                            new_path + run_paths[index + 1:]
+                                if PD_DEFAULT_RUNPATH in run_paths:
+                                        raise MultipleDefaultRunPaths()
+                        except ValueError:
+                                # no PD_DEFAULT_RUNPATH token, so we override the
+                                # whole default search path
+                                pass
+                        new_path = run_paths
+
                 # Map the standard system paths into the proto area.
                 new_path = [
                     os.path.join(proto_dir, fp.lstrip("/"))
@@ -235,7 +276,7 @@
                                 return [ModuleInfo(name, path)]
                         else:
                                 return [q]
-                res = self.load_tail(name, q, tail)
+                res = self.load_tail(q, tail)
                 q.make_package()
                 res.append(q)
                 return res
@@ -278,36 +319,49 @@
                     path = self.path
                 return ModuleInfo(name, path)
 
-        def load_tail(self, name, q, tail):
+        def load_tail(self, q, tail):
                 """Determine where each component of a multilevel import would
                 be found on the file system."""
 
                 self.msgin(4, "load_tail", q, tail)
-                m = q
                 res = []
+                name = q.name
+                cur_parent = q
                 while tail:
                         i = tail.find('.')
                         if i < 0: i = len(tail)
                         head, tail = tail[:i], tail[i+1:]
                         new_name = "%s.%s" % (name, head)
-                        r = self.import_module(head, new_name, q)
+                        r = self.import_module(head, new_name, cur_parent)
                         res.append(r)
                         name = new_name
+                        cur_parent = r
+
                 # All but the last module found must be packages because they
                 # contained other packages.
                 for i in range(0, len(res) - 1):
                         res[i].make_package()
-                self.msgout(4, "load_tail ->", m)
+
+                self.msgout(4, "load_tail ->", q)
                 return res
 
 
 if __name__ == "__main__":
-        mf = DepthLimitedModuleFinder(sys.argv[1])
-        loaded_modules = mf.run_script(sys.argv[2])
-        for res in set([
-            (tuple(m.get_file_names()), tuple(m.dirs)) for m in loaded_modules
-        ]):
-                print "DEP %s" % (res,)
-        missing, maybe =  mf.any_missing_maybe()
-        for name in missing:
-                print "ERR %s" % name,
+        """Usage:
+              depthlimitedmf24.py <proto_dir> <script> [ run_path run_path ... ]
+        """
+        run_paths = sys.argv[3:]
+        try:
+                mf = DepthLimitedModuleFinder(sys.argv[1], run_paths=run_paths)
+                loaded_modules = mf.run_script(sys.argv[2])
+                for res in set([
+                    (tuple(m.get_file_names()), tuple(m.dirs)) for m in loaded_modules
+                ]):
+                        print "DEP %s" % (res,)
+                missing, maybe =  mf.any_missing_maybe()
+                for name in missing:
+                        print "ERR %s" % name,
+        except ValueError, e:
+                print "ERR %s" % e
+        except MultipleDefaultRunPaths, e:
+                print e
--- a/src/modules/flavor/elf.py	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/modules/flavor/elf.py	Tue Feb 22 10:00:49 2011 +1300
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 import os
@@ -29,7 +29,7 @@
 import pkg.elf as elf
 import pkg.flavor.base as base
 
-from pkg.portable import PD_LOCAL_PATH, PD_PROTO_DIR
+from pkg.portable import PD_LOCAL_PATH, PD_PROTO_DIR, PD_DEFAULT_RUNPATH
 
 class BadElfFile(base.DependencyAnalysisError):
         """Exception that is raised when the elf dependency checker is given
@@ -144,8 +144,8 @@
 
 default_run_paths = ["/lib", "/usr/lib"]
 
-def process_elf_dependencies(action, pkg_vars, dyn_tok_conv,
-    kernel_paths, **kwargs):
+def process_elf_dependencies(action, pkg_vars, dyn_tok_conv, run_paths,
+    **kwargs):
         """Produce the elf dependencies for the file delivered in the action
         provided.
 
@@ -157,7 +157,7 @@
         'dyn_tok_conv' is the dictionary which maps the dynamic tokens, like
         $PLATFORM, to the values they should be expanded to.
 
-        'kernel_paths' contains the run paths which kernel modules should use.
+        'run_paths' contains the run paths which elf binaries should use.
         """
 
         if not action.name == "file":
@@ -209,7 +209,7 @@
                         for p in dyn_tok_conv.get("$PLATFORM", []):
                                 rp.append("/platform/%s/kernel" % p)
                 # Default kernel search path
-                rp.extend(kernel_paths)
+                rp.extend(["/kernel", "/usr/kernel"])
                 # What subdirectory should we look in for 64-bit kernel modules?
                 if ei["bits"] == 64:
                         if ei["arch"] == "i386":
@@ -224,12 +224,17 @@
                         if p not in rp:
                                 rp.append(p)
 
-        rp, elist = expand_variables(rp, dyn_tok_conv)
+        elist = []
+        if run_paths:
+                # add our detected runpaths into the user-supplied one (if any)
+                rp = base.insert_default_runpath(rp, run_paths)
 
-        elist = [
+        rp, errs = expand_variables(rp, dyn_tok_conv)
+
+        elist.extend([
             UnsupportedDynamicToken(proto_file, installed_path, p, tok)
-            for p, tok in elist
-        ]
+            for p, tok in errs
+        ])
 
         res = []
 
@@ -242,7 +247,9 @@
                                 # XXX We don't resolve dependencies found in
                                 # /platform, since we don't know where under
                                 # /platform to look.
-                                deppath = os.path.join(p, pn, kernel64, fn)[1:]
+                                deppath = \
+                                    os.path.join(p, pn, kernel64, fn).lstrip(
+                                    os.path.sep)
                         else:
                                 # This is a hack for when a runpath uses the 64
                                 # symlink to the actual 64-bit directory.
--- a/src/modules/flavor/python.py	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/modules/flavor/python.py	Tue Feb 22 10:00:49 2011 +1300
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 import os
@@ -127,7 +127,7 @@
 py_bin_re = re.compile(r"^\#\!\s*/usr/bin/python(?P<major>\d+)\.(?P<minor>\d+)")
 py_lib_re = re.compile(r"^usr/lib/python(?P<major>\d+)\.(?P<minor>\d+)/")
 
-def process_python_dependencies(action, pkg_vars, script_path):
+def process_python_dependencies(action, pkg_vars, script_path, run_paths):
         """Analyze the file delivered by the action for any python dependencies.
 
         The 'action' parameter contain the action which delivers the file.
@@ -137,6 +137,9 @@
 
         The 'script_path' parameter is None of the file is not executable, or
         is the path for the binary which is used to execute the file.
+
+        The 'run_paths' parameter is a list of paths that should be searched
+        for modules.
         """
 
         # There are three conditions which determine whether python dependency
@@ -224,7 +227,7 @@
         # of python running, use the default analyzer and don't fork and exec.
         if cur_major == analysis_major and cur_minor == analysis_minor:
                 mf = modulefinder.DepthLimitedModuleFinder(
-                    action.attrs[PD_PROTO_DIR])
+                    action.attrs[PD_PROTO_DIR], run_paths=run_paths)
                 loaded_modules = mf.run_script(local_file)
 
                 for names, dirs in set([
@@ -247,6 +250,9 @@
             "depthlimitedmf%s%s.py" % (analysis_major, analysis_minor))
         cmd = ["python%s.%s" % (analysis_major, analysis_minor), exec_file,
             action.attrs[PD_PROTO_DIR], local_file]
+
+        if run_paths:
+                cmd.extend(run_paths)
         try:
                 sp = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                     stderr=subprocess.PIPE)
--- a/src/modules/flavor/script.py	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/modules/flavor/script.py	Tue Feb 22 10:00:49 2011 +1300
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 import os
@@ -78,6 +78,7 @@
         pkg_attrs = {}
 
         script_path = None
+        run_paths = kwargs.get("run_paths", [])
         # add #! dependency
         if l.startswith("#!"):
                 # Determine whether the file will be delivered executable.
@@ -101,7 +102,7 @@
                                 script_path = l
                 if "python" in l:
                         ds, errs, py_attrs = python.process_python_dependencies(
-                            action, pkg_vars, script_path)
+                            action, pkg_vars, script_path, run_paths)
                         elist.extend(errs)
                         deps.extend(ds)
                         for key in py_attrs:
--- a/src/modules/portable/__init__.py	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/modules/portable/__init__.py	Tue Feb 22 10:00:49 2011 +1300
@@ -19,7 +19,7 @@
 #
 # CDDL HEADER END
 #
-# Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 # The portable module provide access to methods that require operating system-
@@ -217,6 +217,22 @@
 PD_PROTO_DIR = "pkg.internal.depend.protodir"
 PD_PROTO_DIR_LIST = "pkg.internal.depend.protodirlist"
 
+# A String to be used for an action attribute created for pkgdepend, indicating
+# module or run paths that can be used to specify the paths that it should use
+# when searching for dependencies on given files.  For example setting the
+# elf runpath for elf binaries, or $PYTHONPATH (or sys.path) for python modules.
+PD_RUN_PATH = "pkg.depend.runpath"
+
+# A string used as a component of the pkg.depend.runpath value as a special
+# token to determine where to insert the runpath that pkgdepend automatically
+# generates.
+PD_DEFAULT_RUNPATH = "$PKGDEPEND_RUNPATH"
+
+# A String used for an action attribute to allow pkgdepend to bypass generation
+# of dependencies against a given filename, eg. don't try to generate a
+# dependency on dtracestubs from platform/i86pc/kernel/amd64/unix
+PD_BYPASS_GENERATE = "pkg.depend.bypass-generate"
+
 import platform
 import util as os_util
 
--- a/src/modules/publish/dependencies.py	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/modules/publish/dependencies.py	Tue Feb 22 10:00:49 2011 +1300
@@ -27,6 +27,7 @@
 import copy
 import itertools
 import os
+import re
 import urllib
 
 from collections import namedtuple
@@ -48,6 +49,11 @@
 reason_prefix = "%s.reason" % base.Dependency.DEPEND_DEBUG_PREFIX
 type_prefix = "%s.type" % base.Dependency.DEPEND_DEBUG_PREFIX
 target_prefix = "%s.target" % base.Dependency.DEPEND_DEBUG_PREFIX
+bypassed_prefix = "%s.bypassed" % base.Dependency.DEPEND_DEBUG_PREFIX
+
+# A tag used to hold the product of the paths_prefix and files_prefix
+# contents, used when bypassing dependency generation
+fullpaths_prefix = "%s.fullpath" % base.Dependency.DEPEND_DEBUG_PREFIX
 
 Entries = namedtuple("Entries", ["delivered", "installed"])
 # This namedtuple is used to hold two items. The first, delivered, is used to
@@ -141,8 +147,9 @@
                         "pkg": self.pkg_vars
                     }
 
-def list_implicit_deps(file_path, proto_dirs, dyn_tok_conv, kernel_paths,
-    remove_internal_deps=True, convert=True):
+
+def list_implicit_deps(file_path, proto_dirs, dyn_tok_conv, run_paths,
+    remove_internal_deps=True, convert=True, ignore_bypass=False):
         """Given the manifest provided in file_path, use the known dependency
         generators to produce a list of dependencies the files delivered by
         the manifest have.
@@ -155,22 +162,29 @@
         'dyn_tok_conv' is the dictionary which maps the dynamic tokens, like
         $PLATFORM, to the values they should be expanded to.
 
-        'kernel_paths' contains the run paths which kernel modules should use.
+        'run_paths' contains the run paths that are used to find modules.
 
         'convert' determines whether PublishingDependencies will be transformed
         to DependencyActions prior to being returned.  This is primarily an
-        option to facilitate testing and debugging."""
+        option to facilitate testing and debugging.
+
+        'ignore_bypass' determines whether to bypass generation of dependencies
+        against certain files or directories.  This is primarily an option to
+        facilitate testing and debugging.
+        """
 
         m, missing_manf_files = __make_manifest(file_path, proto_dirs)
         pkg_vars = m.get_all_variants()
         deps, elist, missing, pkg_attrs = list_implicit_deps_for_manifest(m,
-            proto_dirs, pkg_vars, dyn_tok_conv, kernel_paths)
+            proto_dirs, pkg_vars, dyn_tok_conv, run_paths,
+            ignore_bypass=ignore_bypass)
         rid_errs = []
         if remove_internal_deps:
                 deps, rid_errs = resolve_internal_deps(deps, m, proto_dirs,
                     pkg_vars)
         if convert:
                 deps = convert_to_standard_dep_actions(deps)
+
         return deps, missing_manf_files + elist + rid_errs, missing, pkg_attrs
 
 def convert_to_standard_dep_actions(deps):
@@ -261,7 +275,7 @@
 }
 
 def list_implicit_deps_for_manifest(mfst, proto_dirs, pkg_vars, dyn_tok_conv,
-    kernel_paths):
+    run_paths, ignore_bypass=False):
         """For a manifest, produce the list of dependencies generated by the
         files it installs.
 
@@ -276,7 +290,13 @@
         'dyn_tok_conv' is the dictionary which maps the dynamic tokens, like
         $PLATFORM, to the values they should be expanded to.
 
-        'kernel_paths' contains the run paths which kernel modules should use.
+        'run_paths' contains the run paths used to find modules, this is
+        overridden by any manifest or action attributes setting
+        'pkg.depend.runpath' (portable.PD_RUN_PATH)
+
+        'ignore_bypass' set to True will prevent us from looking up any
+        pkg.depend.bypass-generate attributes - this is primarily to aid
+        debugging and testing.
 
         Returns a tuple of three lists.
 
@@ -298,8 +318,36 @@
         act_list = list(mfst.gen_actions_by_type("file"))
         file_types = portable.get_file_type(act_list)
 
+        if portable.PD_RUN_PATH in mfst:
+                # check for multiple values in a set attribute
+                run_path_str = mfst[portable.PD_RUN_PATH]
+                es = __verify_run_path(run_path_str)
+                if es:
+                        return deps, elist + es, missing, pkg_attrs
+                run_paths = run_path_str.split(":")
+
+        mf_bypass = []
+        if portable.PD_BYPASS_GENERATE in mfst:
+                mf_bypass = __makelist(mfst[portable.PD_BYPASS_GENERATE])
+
         for i, file_type in enumerate(file_types):
                 a = act_list[i]
+
+                a_run_paths = run_paths
+                if portable.PD_RUN_PATH in a.attrs:
+                        a_run_path_str = a.attrs[portable.PD_RUN_PATH]
+                        es = __verify_run_path(a_run_path_str)
+                        if es:
+                                return deps, elist + es, missing, pkg_attrs
+                        a_run_paths = a_run_path_str.split(":")
+
+                bypass = __makelist(
+                    a.attrs.get(portable.PD_BYPASS_GENERATE, mf_bypass))
+                # If we're bypassing all dependency generation, we can avoid
+                # calling our dispatch_dict function altogether.
+                if (".*" in bypass or "^.*$" in bypass) and not ignore_bypass:
+                        pkg_attrs[bypassed_prefix] = "%s:.*" % a.attrs["path"]
+                        continue
                 try:
                         func = dispatch_dict[file_type]
                 except KeyError:
@@ -311,7 +359,14 @@
                                 ds, errs, attrs = func(action=a,
                                     pkg_vars=pkg_vars,
                                     dyn_tok_conv=dyn_tok_conv,
-                                    kernel_paths=kernel_paths)
+                                    run_paths=a_run_paths)
+
+                                # prune out any dependencies on files we have
+                                # been asked to bypass generation for
+                                if bypass and not ignore_bypass:
+                                        ds = \
+                                            __bypass_deps(ds, bypass, pkg_attrs)
+
                                 deps.extend(ds)
                                 elist.extend(errs)
                                 __update_pkg_attrs(pkg_attrs, attrs)
@@ -326,6 +381,118 @@
         for key in new_attrs:
                 pkg_attrs.setdefault(key, []).extend(new_attrs[key])
 
+def __verify_run_path(run_path_str):
+        """Verify we've been passed a single item and ensure it contains
+        at least one non-null string."""
+        if not isinstance(run_path_str, str):
+                # require a colon separated string to potentially enforce
+                # ordering in the future
+                return [base.DependencyAnalysisError(
+                    _("Manifest specified multiple values for %s rather "
+                    "than a single colon-separated string.") %
+                    portable.PD_RUN_PATH)]
+        if set(run_path_str.split(":")) == set([""]):
+                return [base.DependencyAnalysisError(
+                    _("Manifest did not specify any entries for %s, expecting "
+                    "a colon-separated string.") % portable.PD_RUN_PATH)]
+        return []
+
+def __makelist(value):
+        """Given a value, return it if that value is a list, or if it's a
+        string, return a single-element list of just that string."""
+        if isinstance(value, list):
+                return value
+        elif isinstance(value, str):
+                if value:
+                        return [value]
+                else:
+                        return []
+        else:
+                raise ValueError("Value was not a string or a list")
+
+def __bypass_deps(ds, bypass, pkg_attrs):
+        """Return a list of dependencies, excluding any of those that should be
+        bypassed.
+
+        ds         the list of dependencies to operate on
+        bypass     a list of paths on which we should not generate dependencies
+        pkg_attrs  the set of package attributes for this manifest, produced as
+                   a by-product of this dependency generation
+
+        We support regular expressions as entries in the bypass list, matching
+        one or more files.
+
+        If a bypass-list entry is provided that does not contain one of the
+        following characters: ["/", "*", "?"], it is assumed to be a filename,
+        and expanded to the regular expression ".*/<entry>"
+
+        All bypass-list entries are assumed to be regular expressions that are
+        rooted at ^ and $.
+
+        The special match-all wildcard ".*" is dealt with separately,
+        by list_implicit_deps_for_manifest(..)
+        """
+
+        new_ds = []
+        for dep in ds:
+                full_paths = set(make_paths(dep))
+                bypassed_files = set()
+                bypass_regexps = []
+                try:
+                        for item in bypass:
+                                # try to determine if this is a regexp,
+                                # rather than a filename
+                                if "*" in item or "?" in item:
+                                        pass
+                                # if it appears to be a filename, make it match
+                                # all paths to that filename.
+                                elif "/" not in item:
+                                        item = ".*%s%s" % (os.path.sep, item)
+
+                                # always anchor our regular expressions,
+                                # otherwise, we get partial matches for files,
+                                # eg. bypassing "foo.c" would otherwise also
+                                # bypass "foo.cc"
+                                if item:
+                                        if not item.endswith("$"):
+                                                item = item + "$"
+                                        if not item.startswith("^"):
+                                                item = "^" + item
+                                        bypass_regexps.append(re.compile(item))
+                except re.error, e:
+                        raise base.InvalidDependBypassValue(item, e)
+
+                for path in full_paths:
+                        if path in bypass:
+                                bypassed_files.add(path)
+                                continue
+                        for regexp in bypass_regexps:
+                                if regexp.match(path):
+                                        bypassed_files.add(path)
+
+                if bypassed_files:
+                        # remove the old runpath and basename entries from
+                        # the dependency if they were present
+                        dep.attrs.pop(files_prefix, None)
+                        dep.attrs.pop(paths_prefix, None)
+                        dep.base_names = []
+                        dep.run_paths = []
+
+                        # determine our new list of paths
+                        full_paths = full_paths - bypassed_files
+
+                        dep.full_paths = sorted(list(full_paths))
+                        dep.attrs[fullpaths_prefix] = dep.full_paths
+                        pkg_attrs[bypassed_prefix] = \
+                            sorted(list(bypassed_files))
+
+                        # only include the dependency if it still contains data
+                        if full_paths:
+                                new_ds.append(dep)
+                else:
+                        new_ds.append(dep)
+        return new_ds
+
 def __make_manifest(fp, basedirs=None, load_data=True):
         """Given the file path, 'fp', return a Manifest for that path."""
 
@@ -435,7 +602,7 @@
                 vars.append((action_vars, pfmri))
         if errs:
                 # If any packages are in errs, then more than one file delivered
-                # the same path under some configuaration of variants. This
+                # the same path under some configuration of variants. This
                 # situation is unresolvable.
                 raise AmbiguousPathError(errs, file_dep)
         return res, dep_vars
@@ -444,6 +611,9 @@
         """Find all the possible paths which could satisfy the dependency
         'file_dep'."""
 
+        if file_dep.attrs.get(fullpaths_prefix, []):
+                return file_dep.attrs[fullpaths_prefix]
+
         rps = file_dep.attrs.get(paths_prefix, [""])
         files = file_dep.attrs[files_prefix]
         if isinstance(files, basestring):
@@ -570,6 +740,8 @@
                             files_prefix: path
                         })
                         attrs.pop(paths_prefix, None)
+                        attrs.pop(fullpaths_prefix, None)
+
                         assert vc_intersection.intersects(rec_vc), \
                             "vc:%s\nvc_intersection:%s" % \
                             (rec_vc, vc_intersection)
@@ -631,6 +803,7 @@
                         # with the single path that works.
                         for na, nv in new_res:
                                 na.attrs.pop(paths_prefix, None)
+                                na.attrs.pop(fullpaths_prefix, None)
                                 na.attrs[files_prefix] = [p]
 
                         if not res:
@@ -726,7 +899,7 @@
 def is_file_dependency(act):
         return act.name == "depend" and \
             act.attrs.get("fmri", None) == base.Dependency.DUMMY_FMRI and \
-            files_prefix in act.attrs
+            (files_prefix in act.attrs or fullpaths_prefix in act.attrs)
 
 def merge_deps(dest, src):
         """Add the information contained in src's attrs to dest."""
--- a/src/pkgdep.py	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/pkgdep.py	Tue Feb 22 10:00:49 2011 +1300
@@ -111,7 +111,7 @@
         show_missing = False
         show_usage = False
         isa_paths = []
-        kernel_paths = []
+        run_paths = []
         platform_paths = []
         dyn_tok_conv = {}
         proto_dirs = []
@@ -135,7 +135,7 @@
                 elif opt == "-I":
                         remove_internal_deps = False
                 elif opt == "-k":
-                        kernel_paths.append(arg)
+                        run_paths.append(arg)
                 elif opt == "-m":
                         echo_manf = True
                 elif opt == "-M":
@@ -147,9 +147,6 @@
         if len(pargs) > 2 or len(pargs) < 1:
                 usage(_("Generate only accepts one or two arguments."))
 
-        if not kernel_paths:
-                kernel_paths = ["/kernel", "/usr/kernel"]
-
         if "$ORIGIN" in dyn_tok_conv:
                 usage(_("ORIGIN may not be specified using -D. It will be "
                     "inferred from the\ninstall paths of the files."))
@@ -173,8 +170,7 @@
 
         try:
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(manf,
-                    proto_dirs, dyn_tok_conv, kernel_paths,
-                    remove_internal_deps)
+                    proto_dirs, dyn_tok_conv, run_paths, remove_internal_deps)
         except (actions.MalformedActionError, actions.UnknownActionError), e:
                 error(_("Could not parse manifest %(manifest)s because of the "
                     "following line:\n%(line)s") % { 'manifest': manf ,
--- a/src/tests/api/t_dependencies.py	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/tests/api/t_dependencies.py	Tue Feb 22 10:00:49 2011 +1300
@@ -58,6 +58,9 @@
             "libc_path": "lib/libc.so.1",
             "pkg_path":
                 "usr/lib/python2.6/vendor-packages/pkg_test/client/__init__.py",
+            "bypass_path": "pkgdep_test/file.py",
+            "runpath_mod_path": "opt/pkgdep_runpath/__init__.py",
+            "runpath_mod_test_path": "opt/pkgdep_runpath/pdtest.py",
             "script_path": "lib/svc/method/svc-pkg-depot",
             "syslog_path": "var/log/syslog",
             "py_mod_path": "usr/lib/python2.6/vendor-packages/cProfile.py",
@@ -161,6 +164,7 @@
 import sys
 import pkg_test.indexer_test.foobar as indexer
 import pkg.search_storage as ss
+import xml.dom.minidom
 from ..misc_test import EmptyI
 """
 
@@ -171,6 +175,7 @@
 import sys
 import pkg_test.indexer_test.foobar as indexer
 import pkg.search_storage as ss
+import xml.dom.minidom
 from pkg_test.misc_test import EmptyI
 """
         # a python module that causes slightly different behaviour in
@@ -691,6 +696,137 @@
 
         script_text = "#!/usr/bin/ksh -p\n"
 
+        # the following scripts and manifests are used to test pkgdepend
+        # runpath and bypass
+        python_bypass_text = """\
+#!/usr/bin/python2.6
+# This python script has an import used to test pkgdepend runpath and bypass
+# functionality. pdtest is installed in a non-standard location and generates
+# dependencies on multiple files (pdtest.py, pdtest.pyc, pdtest.pyo, etc.)
+import pkgdep_runpath.pdtest
+"""
+
+        # standard use of a runpath attribute
+        python_runpath_manf = """\
+file NOHASH group=sys mode=0755 owner=root path=%(bypass_path)s \
+    pkg.depend.runpath=opt:$PKGDEPEND_RUNPATH:dummy_directory
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_path)s
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_test_path)s
+""" % paths
+
+        # a manifest which has an empty runpath (which is invalid) - we will
+        # throw an error here and want to test for it
+        python_empty_runpath_manf = """
+file NOHASH group=sys mode=0755 owner=root path=%(bypass_path)s \
+    pkg.depend.runpath=""
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_path)s
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_test_path)s
+""" % paths
+
+        # a manifest which has a broken runpath
+        python_invalid_runpath_manf = """
+file NOHASH group=sys mode=0755 owner=root path=%(bypass_path)s \
+    pkg.depend.runpath=foo pkg.depend.runpath=bar pkg.depend.runpath=opt
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_path)s
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_test_path)s
+""" % paths
+
+        # a manifest which needs a runpath in order to generate deps properly
+        python_invalid_runpath2_manf = """
+file NOHASH group=sys mode=0755 owner=root path=%(bypass_path)s \
+    pkg.depend.runpath=$PKGDEPEND_RUNPATH:foo:$PKGDEPEND_RUNPATH
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_path)s
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_test_path)s
+""" % paths
+
+
+        # a manifest that bypasses two files and sets a runpath
+        python_bypass_manf = """
+file NOHASH group=sys mode=0755 owner=root path=%(bypass_path)s \
+    pkg.depend.bypass-generate=opt/pkgdep_runpath/pdtest.py \
+    pkg.depend.bypass-generate=usr/lib/python2.6/lib-dynload/pkgdep_runpath/pdtestmodule.so \
+    pkg.depend.runpath=opt:$PKGDEPEND_RUNPATH
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_path)s
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_test_path)s
+""" % paths
+
+        # a manifest that generates a single dependency, which we want to
+        # bypass
+        ksh_bypass_manf = """
+file NOHASH group=sys mode=055 owner=root path=%(script_path)s \
+    pkg.depend.bypass-generate=usr/bin/ksh
+""" % paths
+
+        # a manifest that generates a single dependency, which we want to
+        # bypass.  Specifying just the filename means we should bypass all
+        # paths to that filename (we implicitly add ".*/")
+        ksh_bypass_filename_manf = """
+file NOHASH group=sys mode=055 owner=root path=%(script_path)s \
+    pkg.depend.bypass-generate=ksh
+""" % paths
+
+        # a manifest that generates a single dependency, which we want to
+        # bypass, duplicating the value
+        ksh_bypass_dup_manf = """
+file NOHASH group=sys mode=055 owner=root path=%(script_path)s \
+    pkg.depend.bypass-generate=usr/bin/ksh \
+    pkg.depend.bypass-generate=usr/bin/ksh
+""" % paths
+
+        # a manifest that declares bypasses, none of which match the
+        # dependencies we generate
+        python_bypass_nomatch_manf = """
+file NOHASH group=sys mode=0755 owner=root path=%(bypass_path)s \
+    pkg.depend.bypass-generate=cats \
+    pkg.depend.bypass-generate=dogs \
+    pkg.depend.runpath=$PKGDEPEND_RUNPATH:opt
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_path)s
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_test_path)s
+""" % paths
+
+        # a manifest which uses a wildcard to bypass all dependency generation
+        python_wildcard_bypass_manf = """
+file NOHASH group=sys mode=0755 owner=root path=%(bypass_path)s \
+    pkg.depend.bypass-generate=.* \
+    pkg.depend.runpath=$PKGDEPEND_RUNPATH:opt
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_path)s
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_test_path)s
+""" % paths
+
+        # a manifest which uses a file wildcard to bypass generation
+        python_wildcard_file_bypass_manf = """
+file NOHASH group=sys mode=0755 owner=root path=%(bypass_path)s \
+    pkg.depend.bypass-generate=opt/pkgdep_runpath/.* \
+    pkg.depend.runpath=$PKGDEPEND_RUNPATH:opt
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_path)s
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_test_path)s
+""" % paths
+
+        # a manifest which uses a dir wildcard to bypass generation
+        python_wildcard_dir_bypass_manf = """
+file NOHASH group=sys mode=0755 owner=root path=%(bypass_path)s \
+    pkg.depend.bypass-generate=pdtest.py \
+    pkg.depend.bypass-generate=pdtest.pyc \
+    pkg.depend.bypass-generate=pdtest.pyo \
+    pkg.depend.bypass-generate=pdtest.so \
+    pkg.depend.bypass-generate=pdtestmodule.so \
+    pkg.depend.runpath=$PKGDEPEND_RUNPATH:opt
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_path)s
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_test_path)s
+""" % paths
+
+        # a manifest which uses a combination of directory, file and normal
+        # bypass entries
+        python_wildcard_combo_bypass_manf = """
+file NOHASH group=sys mode=0755 owner=root path=%(bypass_path)s \
+    pkg.depend.bypass-generate=pdtest.py \
+    pkg.depend.bypass-generate=usr/lib/python2.6/vendor-packages/.* \
+    pkg.depend.bypass-generate=usr/lib/python2.6/site-packages/pkgdep_runpath/pdtestmodule.so \
+    pkg.depend.runpath=$PKGDEPEND_RUNPATH:opt
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_path)s
+file NOHASH group=sys mode=0755 owner=root path=%(runpath_mod_test_path)s
+""" % paths
+
         def setUp(self):
                 pkg5unittest.Pkg5TestCase.setUp(self)
 
@@ -709,6 +845,13 @@
                     "#!/usr/bin/python")
                 self.make_proto_text_file("%s/cProfile.py" % pdir,
                     self.python_module_text)
+                # install these in non-sys.path locations
+                self.make_proto_text_file(self.paths["bypass_path"],
+                    self.python_bypass_text)
+                self.make_proto_text_file(self.paths["runpath_mod_path"],
+                    "#!/usr/bin/python2.6")
+                self.make_proto_text_file(self.paths["runpath_mod_test_path"],
+                    "#!/usr/bin/python2.6")
                 
 	def make_smf_test_files(self):
                 for manifest in self.smf_paths.keys():
@@ -799,6 +942,7 @@
                 self.assertEqual(d.dep_key(), self.__path_to_key(
                     self.paths["syslog_path"]))
                 self.assertEqual(d.action.attrs["path"], "usr/foo")
+                self.assert_(dependencies.is_file_dependency(d))
 
         def test_ext_script(self):
                 """Check that a file that starts with #! and references a file
@@ -858,6 +1002,7 @@
                 for d in ds:
                         self.assert_(d.is_error())
                         self.assert_(d.dep_vars.is_satisfied())
+                        self.assert_(dependencies.is_file_dependency(d))
                         if d.dep_key() == self.__path_to_key(
                             self.paths["ksh_path"]):
                                 self.assertEqual(d.action.attrs["path"],
@@ -892,6 +1037,7 @@
                         self.assertEqual(
                                 d.action.attrs["path"],
                                 self.paths["curses_path"])
+                        self.assert_(dependencies.is_file_dependency(d))
 
                 t_path = self.make_manifest(self.ext_elf_manf)
                 self.make_elf(self.paths["curses_path"])
@@ -923,6 +1069,7 @@
                             self.__path_to_key(self.paths["libc_path"]))
                         self.assertEqual(d.action.attrs["path"],
                             self.paths["curses_path"])
+                        self.assert_(dependencies.is_file_dependency(d))
 
                 t_path = self.make_manifest(self.int_elf_manf)
                 self.make_elf(self.paths["curses_path"])
@@ -946,10 +1093,12 @@
 
                 def _check_all_res(res):
                         ds, es, ms, pkg_attrs = res
-                        mod_suffs = ["/__init__.py", ".py", ".pyc", ".pyo"]
+                        mod_suffs = ["/__init__.py", ".py", ".pyc", ".pyo",
+                            ".so", "module.so"]
                         mod_names = ["foobar", "misc_test", "os",
-                            "search_storage"]
-                        pkg_names = ["indexer_test", "pkg", "pkg_test"]
+                            "search_storage", "minidom"]
+                        pkg_names = ["indexer_test", "pkg", "pkg_test", "xml",
+                            "dom"]
                         expected_deps = set([("python",)] +
                             [tuple(sorted([
                                 "%s%s" % (n,s) for s in mod_suffs
@@ -995,9 +1144,12 @@
 
                 def _check_all_res(res):
                         ds, es, ms, pkg_attrs = res
-                        mod_suffs = ["/__init__.py", ".py", ".pyc", ".pyo"]
-                        mod_names = ["foobar", "os", "search_storage"]
-                        pkg_names = ["indexer_test", "pkg", "pkg_test"]
+                        mod_suffs = ["/__init__.py", ".py", ".pyc", ".pyo",
+                            ".so", "module.so"]
+                        mod_names = ["foobar", "os", "search_storage",
+                            "minidom"]
+                        pkg_names = ["indexer_test", "pkg", "pkg_test", "xml",
+                            "dom"]
                         expected_deps = set([("python",)] +
                             [tuple(sorted([
                                 "%s%s" % (n,s) for s in mod_suffs
@@ -1047,10 +1199,18 @@
 
                 def _check_all_res(res):
                         ds, es, ms, pkg_attrs = res
-                        mod_suffs = ["/__init__.py", ".py", ".pyc", ".pyo"]
+                        mod_suffs = ["/__init__.py", ".py", ".pyc", ".pyo",
+                            ".so", "module.so"]
                         mod_names = ["foobar", "misc_test", "os",
-                            "search_storage"]
-                        pkg_names = ["indexer_test", "pkg", "pkg_test"]
+                            "search_storage", "minidom"]
+                        pkg_names = ["indexer_test", "pkg", "pkg_test",
+                            "xml", "dom"]
+
+                        # for a multi-level import, we should have the correct
+                        # dir suffixes generated for the pkg.debug.depend.paths
+                        path_suffixes = {"minidom.py": "xml/dom",
+                            "dom/__init__.py": "xml"}
+
                         expected_deps = set([("python",)] +
                             [tuple(sorted([
                                 "%s%s" % (n,s) for s in mod_suffs
@@ -1072,6 +1232,22 @@
                                         raise RuntimeError("Got this "
                                             "unexpected dep:%s\n\nd:%s" %
                                             (d.dep_key()[0], d))
+
+                                # check the suffixes generated in our
+                                # pkg.debug.depend.path
+                                for bn in d.base_names:
+                                        if bn not in path_suffixes:
+                                                continue
+
+                                        suffix = path_suffixes[bn]
+                                        for p in d.run_paths:
+                                                self.assert_(
+                                                    p.endswith(suffix),
+                                                    "suffix %s not found in "
+                                                    "paths for %s: %s" %
+                                                    (suffix, bn, " ".join(
+                                                    d.run_paths)))
+
                                 expected_deps.remove(d.dep_key()[0])
                                 self.assertEqual(d.action.attrs["path"],
                                         self.paths["pkg_path"])
@@ -1383,6 +1559,7 @@
                             self.__path_to_key(self.paths["libc_path"]))
                         self.assertEqual(d.action.attrs["path"],
                             self.paths["curses_path"])
+                        self.assert_(dependencies.is_file_dependency(d))
 
                 t_path = self.make_manifest(self.int_elf_manf)
                 self.make_elf(os.path.join("foo", self.paths["curses_path"]))
@@ -1477,10 +1654,12 @@
 
                 def _py_check_all_res(res):
                         ds, es, ms, pkg_attrs = res
-                        mod_suffs = ["/__init__.py", ".py", ".pyc", ".pyo"]
+                        mod_suffs = ["/__init__.py", ".py", ".pyc", ".pyo",
+                            ".so", "module.so"]
                         mod_names = ["foobar", "misc_test", "os",
-                            "search_storage"]
-                        pkg_names = ["indexer_test", "pkg", "pkg_test"]
+                            "search_storage", "minidom"]
+                        pkg_names = ["indexer_test", "pkg", "pkg_test",
+                            "xml", "dom"]
                         expected_deps = set([("python",)] +
                             [tuple(sorted([
                                 "%s%s" % (n,s) for s in mod_suffs
@@ -1631,6 +1810,7 @@
                     convert=False)
                 self.assert_(len(ds) == 0, "Expected 0 dependencies, got %s" %
                     len(ds))
+                self.assert_(dependencies.is_file_dependency(d))
 
 
         def test_ext_smf_manifest(self):
@@ -1748,6 +1928,322 @@
                     self.smf_fmris["service_unknown"],
                     "faildeps_smf_manf")
 
+        def test_runpath_1(self):
+                """Test basic functionality of runpaths."""
+
+                t_path = self.make_manifest(self.python_runpath_manf)
+                self.make_python_test_files(2.6)
+
+                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
+                self.assert_(es==[], "Unexpected errors reported: %s" % es)
+
+                for dep in ds:
+                        # only interested in seeing that our runpath was changed
+                        if "pdtest.py" in dep.attrs["pkg.debug.depend.file"]:
+                                self.assert_("opt/pkgdep_runpath" in
+                                    dep.attrs["pkg.debug.depend.path"])
+                                self.assert_("usr/lib/python2.6/pkgdep_runpath"
+                                    in dep.attrs["pkg.debug.depend.path"])
+                                # ensure this dependency was indeed generated
+                                # as a result of our test file
+                                self.assert_("pkgdep_test/file.py" in
+                                    dep.attrs["pkg.debug.depend.reason"])
+                        self.assert_(dependencies.is_file_dependency(dep))
+
+        def test_runpath_2(self):
+                """Test invalid runpath attributes."""
+
+                self.make_python_test_files(2.6)
+
+                # test a runpath with multiple values
+                t_path = self.make_manifest(self.python_invalid_runpath_manf)
+                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
+                self.assert_(es != [], "No errors reported for broken runpath")
+
+                # test a runpath with multiple $PD_DEFAULT_RUNPATH components
+                t_path = self.make_manifest(self.python_invalid_runpath2_manf)
+                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
+                self.assert_(es != [], "No errors reported for broken runpath")
+
+        def test_runpath_3(self):
+                """Test setting an empty runpath attribute"""
+
+                t_path = self.make_manifest(self.python_empty_runpath_manf)
+                self.make_python_test_files(2.6)
+
+                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
+                self.assert_(es != [], "No errors reported for empty runpath")
+
+        def validate_bypass_dep(self, dep):
+                """Given a dependency which may be bypassed, if it has been,
+                it should have been expanded into a dependency containing just
+                pkg.debug.depend.fullpath entries.
+                """
+                self.assert_(dependencies.is_file_dependency(dep))
+
+                if dep.attrs.get("pkg.debug.depend.fullpath", None):
+                        for val in ["path", "file"]:
+                                self.assert_("pkg.debug.depend.%s" % val
+                                    not in dep.attrs, "We should not see a %s "
+                                    "entry in this dependency: %s" %
+                                    (val, dep))
+                                self.assert_(not dep.run_paths,
+                                    "Unexpected run_paths: %s" % dep)
+                                self.assert_(not dep.base_names,
+                                    "Unexpected base_names: %s" % dep)
+                else:
+                        self.assert_("pkg.debug.depend.fullpath" not in
+                            dep.attrs, "We should not see a fullpath "
+                            "entry in this dependency: %s" % dep)
+                        self.assert_(not dep.full_paths,
+                            "Unexpected full_paths: %s" % dep)
+
+        def verify_bypass(self, ds, es, bypass):
+                """Given a list of dependencies, and a list of bypass paths,
+                verify that we have not generated a dependency on any of the
+                items in the bypass list.
+
+                If a bypass has been performed, the dependency will have been
+                expanded to contain pkg.debug.depend.fullpath values,
+                otherwise we should have p.d.d.path and p.d.d.file items.
+                We should never have all three attributes set.
+                """
+
+                self.assert_(len(es) == 0, "Errors reported during bypass: %s" %
+                    es)
+
+                for dep in ds:
+                        # generate all possible paths this dep could represent
+                        dep_paths = set()
+                        self.validate_bypass_dep(dep)
+                        if dep.attrs.get("pkg.debug.depend.fullpath", None):
+                                dep_paths.update(
+                                    dep.attrs["pkg.debug.depend.fullpath"])
+                        else:
+                                for filename in dep.base_names:
+                                        dep_paths.update([os.path.join(dir,
+                                            filename)
+                                            for dir in dep.run_paths])
+
+                        self.assert_(dependencies.is_file_dependency(dep))
+
+                        # finally, check the dependencies
+                        if dep_paths.intersection(set(bypass)):
+                                self.debug("Some items were not bypassed: %s" %
+                                    "\n".join(sorted(list(
+                                    dep_paths.intersection(set(bypass))))))
+                                return False
+                return True
+
+        def verify_dep_generation(self, ds, expected):
+                """Verifies that we have generated dependencies on the given
+                files"""
+                dep_paths = set()
+                for dep in ds:
+                        self.debug(dep)
+                        self.validate_bypass_dep(dep)
+                        if dep.attrs.get("pkg.debug.depend.fullpath", None):
+                                dep_paths.update(
+                                    dep.attrs["pkg.debug.depend.fullpath"])
+                        else:
+                                # generate all paths this dep could represent
+                                for filename in dep.base_names:
+                                        dep_paths.update([
+                                            os.path.join(dir, filename)
+                                            for dir in dep.run_paths])
+                for item in expected:
+                        if item not in dep_paths:
+                                self.debug("Expected to see dependency on %s" %
+                                    item)
+                                return False
+                return True
+
+        def test_bypass_1(self):
+                """Ensure we can bypass dependency generation on a given file,
+                or set of files
+                """
+                # this manifest should result in multiple dependencies
+                t_path = self.make_manifest(self.python_bypass_manf)
+                self.make_python_test_files(2.6)
+
+                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
+                self.assert_(self.verify_bypass(ds, es, [
+                    "opt/pkgdep_runpath/pdtest.py",
+                    "usr/lib/python2.6/lib-dynload/pkgdep_runpath/pdtestmodule.so"]),
+                    "Python script was not bypassed")
+                # now check we depend on some files which should not have been
+                # bypassed
+                self.assert_(self.verify_dep_generation(ds,
+                    ["usr/lib/python2.6/lib-dynload/pkgdep_runpath/pdtest.so",
+                    "usr/lib/python2.6/plat-sunos5/pkgdep_runpath/pdtest/__init__.py",
+                    "usr/lib/python2.6/lib-dynload/pkgdep_runpath/pdtest.py",
+                    "opt/pkgdep_runpath/pdtest.pyc"]))
+
+                # now run this again as a control, this time skipping bypass
+                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False, ignore_bypass=True)
+                # the first two items in the list were previously bypassed
+                self.assert_(self.verify_dep_generation(ds,
+                    ["opt/pkgdep_runpath/pdtest.py",
+                    "usr/lib/python2.6/lib-dynload/pkgdep_runpath/pdtestmodule.so",
+                    "usr/lib/python2.6/lib-dynload/pkgdep_runpath/pdtest.so",
+                    "usr/lib/python2.6/plat-sunos5/pkgdep_runpath/pdtest/__init__.py",
+                    "opt/pkgdep_runpath/pdtest.pyc"]),
+                    "Python script did not generate a dependency on bypassed")
+
+                self.make_proto_text_file(self.paths["script_path"],
+                    self.script_text)
+
+                # these manifests should only generate 1 dependency
+                # we also test that duplicated bypass entries are ignored
+                for manifest in [self.ksh_bypass_manf,
+                    self.ksh_bypass_dup_manf, self.ksh_bypass_filename_manf]:
+                        t_path = self.make_manifest(manifest)
+
+                        ds, es, ms, pkg_attrs = \
+                            dependencies.list_implicit_deps(t_path,
+                            [self.proto_dir], {}, [],
+                            remove_internal_deps=False, convert=False)
+                        self.assert_(len(ds) == 0,
+                            "Did not generate exactly 0 dependencies")
+                        self.assert_(self.verify_bypass(ds, es,
+                            ["usr/bin/ksh"]), "Ksh script was not bypassed")
+
+                        # don't perform bypass
+                        ds, es, ms, pkg_attrs = \
+                            dependencies.list_implicit_deps(t_path,
+                            [self.proto_dir], {}, [],
+                            remove_internal_deps=False, convert=False,
+                            ignore_bypass=True)
+                        self.assert_(len(ds) == 1,
+                            "Did not generate exactly 1 dependency on ksh")
+                        self.assert_(self.verify_dep_generation(
+                            ds, ["usr/bin/ksh"]),
+                            "Ksh script did not generate a dependency on ksh")
+
+        def test_bypass_2(self):
+                """Ensure that bypasses containing wildcards work"""
+                t_path = self.make_manifest(self.python_wildcard_bypass_manf)
+                self.make_python_test_files(2.6)
+
+                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
+
+                self.assert_(len(es) == 0, "Errors reported during bypass: %s" %
+                    es)
+
+                # we should have bypassed all dependency generation on all files
+                self.assert_(len(ds) == 0, "Generated dependencies despite "
+                    "request to bypass all dependency generation.")
+
+                t_path = self.make_manifest(
+                    self.python_wildcard_dir_bypass_manf)
+
+                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
+
+                self.assert_(self.verify_bypass(ds, es, [
+                    "usr/lib/python2.6/lib-dynload/pkgdep_runpath/pdtest.pyo",
+                    "usr/lib/python2.6/lib-old/pkgdep_runpath/pdtestmodule.so"]),
+                    "Directory bypass wildcard failed")
+                self.assert_(self.verify_dep_generation(ds, [
+                    "usr/lib/python2.6/pkgdep_runpath/__init__.py",
+                    "usr/lib/python2.6/lib-old/pkgdep_runpath/__init__.py"]),
+                    "Failed to generate dependencies, despite dir-wildcards")
+
+                t_path = self.make_manifest(
+                    self.python_wildcard_file_bypass_manf)
+
+                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
+                self.assert_(self.verify_bypass(ds, es, [
+                    "opt/pkgdep_runpath/pdtest.pyo",
+                    "opt/pkgdep_runpath/pdtestmodule.so"]),
+                    "Failed to bypass some paths despite use of file-wildcard")
+                # we should still have dependencies on these
+                self.assert_(self.verify_dep_generation(ds, [
+                    "usr/lib/python2.6/lib-dynload/pkgdep_runpath/pdtest.pyo",
+                    "usr/lib/python2.6/lib-old/pkgdep_runpath/pdtestmodule.so"]),
+                    "Failed to generate dependencies, despite file-wildcards")
+
+                # finally, test a combination of the above, we have:
+                # pkg.depend.bypass-generate=.*/pdtest.py \
+                # pkg.depend.bypass-generate=usr/lib/python2.6/vendor-packages/.* \
+                # pkg.depend.bypass-generate=usr/lib/python2.6/site-packages/pkgdep_runpath/pdtestmodule.so
+                t_path = self.make_manifest(
+                    self.python_wildcard_combo_bypass_manf)
+
+                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
+                self.assert_(self.verify_bypass(ds, es, [
+                    "opt/pkgdep_runpath/pdtest.py",
+                    "usr/lib/python2.6/vendor-packages/pkgdep_runpath/pdtest.py"
+                    "usr/lib/python2.6/site-packages/pkgdep_runpath/pdtest.py",
+                    "usr/lib/python2.6/site-packages/pkgdep_runpath/pdtestmodule.so"]),
+                    "Failed to bypass some paths despite use of combo-wildcard")
+                # we should still have dependencies on these
+                self.assert_(self.verify_dep_generation(ds, [
+                    "usr/lib/python2.6/site-packages/pkgdep_runpath/pdtest.pyc",
+                    "usr/lib/python2.6/lib-old/pkgdep_runpath/pdtestmodule.so"]),
+                    "Failed to generate dependencies, despite file-wildcards")
+
+        def test_bypass_3(self):
+                """Ensure that bypasses which don't match any dependencies have
+                no effect on the computed dependencies."""
+                t_path = self.make_manifest(self.python_bypass_nomatch_manf)
+                self.make_python_test_files(2.6)
+
+                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
+
+                for dep in ds:
+                        # we expect that there are only file/path attributes
+                        # since no bypasses have been performed
+                        self.assert_("pkg.debug.depend.file" in dep.attrs)
+                        self.assert_("pkg.debug.depend.path" in dep.attrs)
+                        self.assert_("pkg.debug.depend.fullpath"
+                            not in dep.attrs)
+
+                def all_paths(ds):
+                        """Return all paths this list of dependencies could
+                        generate"""
+                        dep_paths = set()
+                        for dep in ds:
+                                # generate all paths this dep could represent
+                                # accumulate paths across every dep in ds
+                                for filename in dep.base_names + ["*"]:
+                                        dep_paths.update(os.path.join(dir, filename)
+                                            for dir in dep.run_paths + ["*"])
+                                dep_paths.remove("*/*")
+                        return dep_paths
+
+                gen_paths = all_paths(ds)
+
+                # now run again, without trying to perform dependency bypass
+                ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False, ignore_bypass=True)
+
+                self.assert_(gen_paths == all_paths(ds),
+                    "generating dependencies with non-matching bypass entries "
+                    "changed the returned dependencies")
+
 
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/cli/t_pkgdep.py	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/tests/cli/t_pkgdep.py	Tue Feb 22 10:00:49 2011 +1300
@@ -563,7 +563,9 @@
                     "%(pfx)s.file=indexer.py "
                     "%(pfx)s.file=indexer.pyc "
                     "%(pfx)s.file=indexer.pyo "
-                    "%(pfx)s.file=indexer/__init__.py " +
+                    "%(pfx)s.file=indexer.so "
+                    "%(pfx)s.file=indexer/__init__.py "
+                    "%(pfx)s.file=indexermodule.so " +
                     pkg_path +
                     " %(pfx)s.reason=%(reason)s "
                     "%(pfx)s.type=python type=require\n"
@@ -572,7 +574,10 @@
                     "%(pfx)s.file=misc.py "
                     "%(pfx)s.file=misc.pyc "
                     "%(pfx)s.file=misc.pyo "
-                    "%(pfx)s.file=misc/__init__.py " +
+                    "%(pfx)s.file=misc.so "
+                    "%(pfx)s.file=misc/__init__.py "
+                    "%(pfx)s.file=miscmodule.so " +
+
                     pkg_path +
                     " %(pfx)s.reason=%(reason)s "
                     "%(pfx)s.type=python type=require\n"
@@ -587,7 +592,10 @@
                     "%(pfx)s.file=search_storage.py "
                     "%(pfx)s.file=search_storage.pyc "
                     "%(pfx)s.file=search_storage.pyo "
-                    "%(pfx)s.file=search_storage/__init__.py " +
+                    "%(pfx)s.file=search_storage.so "
+                    "%(pfx)s.file=search_storage/__init__.py "
+                    "%(pfx)s.file=search_storagemodule.so " +
+
                     pkg_path +
                     " %(pfx)s.reason=%(reason)s "
                     "%(pfx)s.type=python type=require\n") % {
@@ -651,7 +659,7 @@
 file NOHASH group=bin mode=0755 owner=root path=var/log/syslog variant.opensolaris.zone=global
 hardlink path=var/log/foobar target=syslog
 """
-        
+
         bug_15958_manf = """\
 set name=variant.opensolaris.zone value=global value=nonglobal
 """ + bug_16808_manf
@@ -2357,6 +2365,24 @@
                 self.assertEqual(d.attrs["fmri"], "pkg:/[email protected]")
                 self.assertEqual(d.attrs[dependencies.type_prefix], "elf")
 
+        def test_multiple_run_paths(self):
+                """Test that specifying multiple $PKGDEPEND_RUNPATH tokens
+                results in an error."""
+
+                mf = """\
+set name=pkg.fmri value=pkg:/[email protected],5.11-0.160
+file NOHASH group=bin mode=0755 owner=root path=etc/file.py \
+    pkg.depend.runpath=$PKGDEPEND_RUNPATH:$PKGDEPEND_RUNPATH
+    """
+                self.make_proto_text_file("etc/file.py", "#!/usr/bin/python2.6")
+                tp = self.make_manifest(mf)
+                self.pkgdepend_generate("-d %s %s" % (self.test_proto_dir, tp),
+                    exit=1)
+                expected = (
+                    "More than one $PKGDEPEND_RUNPATH token was set on the "
+                    "same action in this manifest.")
+                self.check_res(expected, self.errout)
+                self.check_res("", self.output)
 
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/cli/t_pkgdep_resolve.py	Fri Feb 11 14:04:06 2011 -0800
+++ b/src/tests/cli/t_pkgdep_resolve.py	Tue Feb 22 10:00:49 2011 +1300
@@ -182,6 +182,44 @@
 file NOHASH group=bin mode=0755 owner=root path=usr/lib/python2.6/vendor-packages/pkg/client/indexer.py
 depend fmri=__TBD pkg.debug.depend.file=search_storage.py pkg.debug.depend.file=search_storage.pyc pkg.debug.depend.file=search_storage/__init__.py pkg.debug.depend.path=usr/lib/python2.6/pkg pkg.debug.depend.path=usr/lib/python2.6/lib-dynload/pkg pkg.debug.depend.path=usr/lib/python2.6/lib-old/pkg pkg.debug.depend.path=usr/lib/python2.6/lib-tk/pkg pkg.debug.depend.path=usr/lib/python2.6/plat-sunos5/pkg pkg.debug.depend.path=usr/lib/python2.6/site-packages/pkg pkg.debug.depend.path=usr/lib/python2.6/vendor-packages/pkg pkg.debug.depend.path=usr/lib/python2.6/vendor-packages/gst-0.10/pkg pkg.debug.depend.path=usr/lib/python2.6/vendor-packages/gtk-2.0/pkg pkg.debug.depend.path=usr/lib/python26.zip/pkg pkg.debug.depend.reason=usr/lib/python2.6/vendor-packages/pkg/client/indexer.py pkg.debug.depend.type=python type=require
 """
+
+        multi_file_dep_fullpath_manf = """\
+file NOHASH group=bin mode=0755 owner=root path=usr/lib/python2.6/vendor-packages/pkg/client/indexer.py
+depend fmri=__TBD pkg.debug.depend.file=search_storage.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/pkg/search_storage.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/lib-dynload/pkg/search_storage.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/lib-old/pkg/search_storage.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/lib-tk/pkg/search_storage.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/plat-sunos5/pkg/search_storage.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/site-packages/pkg/search_storage.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/vendor-packages/pkg/search_storage.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/vendor-packages/gst-0.10/pkg/search_storage.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/vendor-packages/gtk-2.0/pkg/search_storage.py \
+        pkg.debug.depend.fullpath=usr/lib/python26.zip/pkg/search_storage.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/pkg/search_storage.pyc \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/lib-dynload/pkg/search_storage.pyc \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/lib-old/pkg/search_storage.pyc \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/lib-tk/pkg/search_storage.pyc \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/plat-sunos5/pkg/search_storage.pyc \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/site-packages/pkg/search_storage.pyc \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/vendor-packages/pkg/search_storage.pyc \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/vendor-packages/gst-0.10/pkg/search_storage.pyc \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/vendor-packages/gtk-2.0/pkg/search_storage.pyc \
+    pkg.debug.depend.fullpath=usr/lib/python26.zip/pkg/search_storage.pyc \
+        pkg.debug.depend.fullpath=usr/lib/python2.6/pkg/search_storage/__init__.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/lib-dynload/pkg/search_storage/__init__.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/lib-old/pkg/search_storage/__init__.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/lib-tk/pkg/search_storage/__init__.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/plat-sunos5/pkg/search_storage/__init__.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/site-packages/pkg/search_storage/__init__.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/vendor-packages/pkg/search_storage/__init__.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/vendor-packages/gst-0.10/pkg/search_storage/__init__.py \
+    pkg.debug.depend.fullpath=usr/lib/python2.6/vendor-packages/gtk-2.0/pkg/search_storage/__init__.py \
+    pkg.debug.depend.fullpath=usr/lib/python26.zip/pkg/search_storage/__init__.py \
+    pkg.debug.depend.reason=usr/lib/python2.6/vendor-packages/pkg/client/indexer.py \
+    pkg.debug.depend.type=python type=require
+"""
+
         multi_file_sat_both = """\
 set name=fmri value=pkg:/sat_both
 file NOHASH group=bin mode=0755 owner=root path=usr/lib/python2.6/vendor-packages/pkg/search_storage.py
@@ -473,6 +511,9 @@
                 self.assertEqual(vnums, set())
 
         def test_multi_file_dependencies(self):
+                """This checks manifests with multiple files, both with
+                pkg.debug.depend.file/path combinations, as well as
+                with pkg.debug.depend.fullpath lists."""
                 def __check_results(pkg_deps, errs, exp_pkg, no_deps, one_dep):
                         if errs:
                                 raise RuntimeError("Got the following "
@@ -484,16 +525,18 @@
                                 raise RuntimeError("Got more than one "
                                     "dependency:\n%s" %
                                     "\n".join(
-                                        [str(d) for d in pkg_deps[col_path]]))
+                                        [str(d) for d in pkg_deps[one_dep]]))
                         d = pkg_deps[one_dep][0]
                         self.assertEqual(d.attrs["fmri"], exp_pkg)
 
                 col_path = self.make_manifest(self.multi_file_dep_manf)
-                # This manifest provides two files that satisfy col_path's
+                # the following manifest is logically equivalent to col_path
+                col_fullpath_path = self.make_manifest(self.multi_file_dep_fullpath_manf)
+                # This manifest provides two files that satisfy col*_path's
                 # file dependencies.
                 both_path = self.make_manifest(self.multi_file_sat_both)
                 # This manifest provides a file that satisfies the dependency
-                # in col_path by delivering a py or pyc file..
+                # in col*_path by delivering a py or pyc file.
                 py_path = self.make_manifest(self.multi_file_sat_py)
                 pyc_path = self.make_manifest(self.multi_file_sat_pyc)
 
@@ -501,61 +544,62 @@
                 # package delivers both files which could satisfy the dependency
                 # or only one package which delivers the dependency is being
                 # resolved against.
-                pkg_deps, errs = dependencies.resolve_deps(
-                    [col_path, both_path], self.api_obj)
-                __check_results(pkg_deps, errs, "pkg:/sat_both", both_path,
-                    col_path)
+                for mf_path in [col_path, col_fullpath_path]:
+                        pkg_deps, errs = dependencies.resolve_deps(
+                            [mf_path, both_path], self.api_obj)
+                        __check_results(pkg_deps, errs, "pkg:/sat_both", both_path,
+                            mf_path)
 
-                pkg_deps, errs = dependencies.resolve_deps(
-                    [col_path, py_path], self.api_obj)
-                __check_results(pkg_deps, errs, "pkg:/sat_py", py_path,
-                    col_path)
+                        pkg_deps, errs = dependencies.resolve_deps(
+                            [mf_path, py_path], self.api_obj)
+                        __check_results(pkg_deps, errs, "pkg:/sat_py", py_path,
+                            mf_path)
 
-                pkg_deps, errs = dependencies.resolve_deps(
-                    [col_path, pyc_path], self.api_obj)
-                __check_results(pkg_deps, errs, "pkg:/sat_pyc", pyc_path,
-                    col_path)
+                        pkg_deps, errs = dependencies.resolve_deps(
+                            [mf_path, pyc_path], self.api_obj)
+                        __check_results(pkg_deps, errs, "pkg:/sat_pyc", pyc_path,
+                            mf_path)
 
-                # This resolution should fail because files which satisfy the
-                # dependency are delivered in two packages.
-                pkg_deps, errs = dependencies.resolve_deps(
-                    [col_path, py_path, pyc_path], self.api_obj)
-                self.assertEqual(len(pkg_deps), 3)
-                for k in pkg_deps:
-                        if pkg_deps[k]:
-                                raise RuntimeError("Got the following "
-                                    "unexpected dependencies:\n%s" %
-                                    "\n".join(["%s\n%s" %
-                                        (k,"\n".join([
-                                            "\t%s" % d for d in pkg_deps[k]]))
-                                            for k in pkg_deps
-                                        ]))
-                if len(errs) != 2:
-                        raise RuntimeError("Didn't get two errors:\n%s" %
-                            "\n".join(str(e) for e in errs))
-                for e in errs:
-                        if isinstance(e,
-                            dependencies.MultiplePackagesPathError):
-                                for d in e.res:
-                                        if d.attrs["fmri"] not in \
-                                            ("pkg:/sat_py",
-                                            "pkg:/sat_pyc"):
-                                                raise RuntimeError("Unexpected "
-                                                    "dependency action:%s" % d)
-                                self.assertEqual(
-                                    e.source.attrs["%s.file" % self.depend_dp],
-                                    ["search_storage.py", "search_storage.pyc",
-                                    "search_storage/__init__.py"])
-                        elif isinstance(e,
-                            dependencies.UnresolvedDependencyError):
-                                self.assertEqual(e.path, col_path)
-                                self.assertEqual(
-                                    e.file_dep.attrs[
-                                        "%s.file" % self.depend_dp],
-                                    ["search_storage.py", "search_storage.pyc",
-                                    "search_storage/__init__.py"])
-                        else:
-                                raise RuntimeError("Unexpected error:%s" % e)
+                        # This resolution should fail because files which satisfy the
+                        # dependency are delivered in two packages.
+                        pkg_deps, errs = dependencies.resolve_deps(
+                            [mf_path, py_path, pyc_path], self.api_obj)
+                        self.assertEqual(len(pkg_deps), 3)
+                        for k in pkg_deps:
+                                if pkg_deps[k]:
+                                        raise RuntimeError("Got the following "
+                                            "unexpected dependencies:\n%s" %
+                                            "\n".join(["%s\n%s" %
+                                                (k,"\n".join([
+                                                    "\t%s" % d for d in pkg_deps[k]]))
+                                                    for k in pkg_deps
+                                                ]))
+                        if len(errs) != 2:
+                                raise RuntimeError("Didn't get two errors:\n%s" %
+                                    "\n".join(str(e) for e in errs))
+                        for e in errs:
+                                if isinstance(e,
+                                    dependencies.MultiplePackagesPathError):
+                                        for d in e.res:
+                                                if d.attrs["fmri"] not in \
+                                                    ("pkg:/sat_py",
+                                                    "pkg:/sat_pyc"):
+                                                        raise RuntimeError("Unexpected "
+                                                            "dependency action:%s" % d)
+                                        self.assertEqual(
+                                            e.source.attrs["%s.file" % self.depend_dp],
+                                            ["search_storage.py", "search_storage.pyc",
+                                            "search_storage/__init__.py"])
+                                elif isinstance(e,
+                                    dependencies.UnresolvedDependencyError):
+                                        self.assertEqual(e.path, mf_path)
+                                        self.assertEqual(
+                                            e.file_dep.attrs[
+                                                "%s.file" % self.depend_dp],
+                                            ["search_storage.py", "search_storage.pyc",
+                                            "search_storage/__init__.py"])
+                                else:
+                                        raise RuntimeError("Unexpected error:%s" % e)
 
 
         def test_bug_11518(self):