15958 generate gets partially satisfied internal dependencies wrong
author Brock Pytlik <brock.pytlik@oracle.com>
date Tue, 28 Sep 2010 15:55:54 -0700
changeset 2091 824491c11ff3
parent 2090 d84a7b3cafa3
child 2092 0ef66bf272d3
15958 generate gets partially satisfied internal dependencies wrong
16807 VariantSets is the wrong abstraction
16808 pkgdepend generate should complain if a package's variants don't make sense
src/modules/actions/generic.py
src/modules/actions/signature.py
src/modules/flavor/base.py
src/modules/flavor/elf.py
src/modules/lint/base.py
src/modules/lint/pkglint_action.py
src/modules/lint/pkglint_manifest.py
src/modules/manifest.py
src/modules/publish/dependencies.py
src/modules/variant.py
src/pkgdep.py
src/tests/api/t_dependencies.py
src/tests/api/t_pkglint.py
src/tests/api/t_variant.py
src/tests/cli/t_pkgdep.py
src/tests/cli/t_pkgdep_resolve.py
src/util/publish/pkgdiff.py
--- a/src/modules/actions/generic.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/modules/actions/generic.py	Tue Sep 28 15:55:54 2010 -0700
@@ -530,8 +530,11 @@
                                 facets.append(k)
                 return variants, facets
 
-        def get_variants(self):
-                return variant.VariantSets(dict((
+        def get_variant_template(self):
+                """Return the VariantCombinationTemplate that the variant tags
+                of this action define."""
+
+                return variant.VariantCombinationTemplate(dict((
                     (v, self.attrs[v]) for v in self.get_varcet_keys()[0]
                 )))
 
--- a/src/modules/actions/signature.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/modules/actions/signature.py	Tue Sep 28 15:55:54 2010 -0700
@@ -206,7 +206,7 @@
                 actions are not handled yet, it also returns False in that
                 case."""
 
-                return self.hash is not None and not self.get_variants()
+                return self.hash is not None and not self.get_variant_template()
 
         @staticmethod
         def decompose_sig_alg(val):
@@ -241,8 +241,8 @@
                 # If this signature is tagged with variants, if the version is
                 # higher than one we know about, or it uses an unrecognized
                 # hash algorithm, we can't handle it yet.
-                if self.get_variants() or ver > generic.Action.sig_version or \
-                    not self.hash_alg:
+                if self.get_variant_template() or \
+                    ver > generic.Action.sig_version or not self.hash_alg:
                         return None
                 # Turning this into a list makes debugging vastly more
                 # tractable.
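
The renamed get_variant_template() used in the two hunks above gathers an action's variant.* tags into a template that maps each variant type to the set of values the action declares. A minimal standalone sketch of that idea, assuming a plain dict of action attributes rather than the real pkg.actions API (the name variant_template is illustrative only):

def variant_template(attrs):
        """Collect "variant.*" attributes into a {type: set(values)} dict."""
        return dict(
            (k, set(v) if isinstance(v, (list, set)) else set([v]))
            for k, v in attrs.items()
            if k.startswith("variant.")
        )

attrs = {"path": "usr/bin/foo", "variant.arch": ["i386", "sparc"]}
print(variant_template(attrs))
# one key, 'variant.arch', mapped to the set of declared values
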
--- a/src/modules/flavor/base.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/modules/flavor/base.py	Tue Sep 28 15:55:54 2010 -0700
@@ -79,7 +79,7 @@
                 self.action = action
                 self.pkg_vars = pkg_vars
                 self.proto_dir = proto_dir
-                self.dep_vars = variant.VariantSets(action.get_variants())
+                self.dep_vars = self.get_variant_combinations()
 
                 attrs.update([
                     ("fmri", self.DUMMY_FMRI),
@@ -87,9 +87,7 @@
                     ("%s.reason" % self.DEPEND_DEBUG_PREFIX, self.action_path())
                 ])
 
-                if self.dep_vars is not None:
-                        attrs.update(self.dep_vars)
-
+                attrs.update(action.get_variant_template())
                 depend.DependencyAction.__init__(self, **attrs)
 
         def is_error(self):
@@ -106,23 +104,17 @@
                     "implement dep_key. Current class is %s") %
                     self.__class__.__name__)
 
-        def get_var_diff(self, ext_vars):
-                """Find the difference of the set of variants declared for the
-                action that produced this dependency, and another set of
-                variants."""
+        def get_variant_combinations(self, satisfied=False):
+                """Create the combinations of variants that this action
+                satisfies or needs satisfied.
 
-                vars = variant.VariantSets(self.action.get_variants())
-                for k in self.pkg_vars:
-                        if k not in vars:
-                                vars[k] = self.pkg_vars[k]
-                return vars.difference(ext_vars)
+                'satisfied' determines whether the combination produced is
+                satisfied or unsatisfied."""
 
-        def get_var_set(self):
-                vars = variant.VariantSets(self.action.get_variants())
-                for k in self.pkg_vars:
-                        if k not in vars:
-                                vars[k] = self.pkg_vars[k]
-                return vars
+                variants = self.action.get_variant_template()
+                variants.merge_unknown(self.pkg_vars)
+                return variant.VariantCombinations(variants,
+                    satisfied=satisfied)
 
         def action_path(self):
                 """Return the path to the file that generated this dependency.
@@ -169,7 +161,7 @@
         def __init__(self, action, base_names, run_paths, pkg_vars, proto_dir,
             kind):
                 """Construct a PublishingDependency object.
-  
+
                 'action' is the action which produced this dependency.
 
                 'base_names' is the list of files of the dependency.
@@ -261,7 +253,7 @@
                 more information for their implementations. See pkg.flavor.elf
                 for an example of this."""
 
-                missing_vars = self.get_var_set()
+                missing_vars = self.get_variant_combinations()
                 for p in self.possibly_delivered(delivered_files):
                         missing_vars.mark_as_satisfied(delivered_files[p])
                         if missing_vars.is_satisfied():
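
The flow in resolve_internal() above reduces to ordinary set arithmetic: a dependency starts with every variant combination unsatisfied, and each delivered file that could provide it removes combinations from the unsatisfied set until none remain. A simplified, self-contained sketch, with frozensets of (variant, value) pairs standing in for VariantCombinations (the helper name combos is an assumption, not part of the patch):

import itertools

def combos(template):
        """All combinations of a {variant: values} template, as frozensets
        of (variant, value) pairs."""
        keys = sorted(template)
        return set(
            frozenset(zip(keys, vals))
            for vals in itertools.product(*[sorted(template[k]) for k in keys])
        )

# The dependency must be satisfied for both architectures ...
unsatisfied = combos({"variant.arch": ["i386", "sparc"]})
# ... but the file that satisfies it is only delivered for i386.
unsatisfied -= combos({"variant.arch": ["i386"]})
print(sorted(sorted(c) for c in unsatisfied))
# [[('variant.arch', 'sparc')]] -- the dependency is only partially satisfied
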
--- a/src/modules/flavor/elf.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/modules/flavor/elf.py	Tue Sep 28 15:55:54 2010 -0700
@@ -98,8 +98,10 @@
                         self.err_type = self.WARNING
                         self.attrs["%s.severity" % self.DEPEND_DEBUG_PREFIX] =\
                             "warning"
-                        return self.WARNING, self.get_var_diff(
+                        missing_vars = self.get_variant_combinations()
+                        missing_vars.mark_as_satisfied(
                             delivered_base_names[self.base_names[0]])
+                        return self.WARNING, missing_vars
                 else:
                         return err, vars
 
--- a/src/modules/lint/base.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/modules/lint/base.py	Tue Sep 28 15:55:54 2010 -0700
@@ -26,6 +26,8 @@
 
 import inspect
 
+import pkg.variant as variant
+
 class LintException(Exception):
         """An exception thrown when something fatal has gone wrong during
         the linting."""
@@ -92,7 +94,7 @@
         def shutdown(self, engine):
                 pass
 
-        def conflicting_variants(self, actions):
+        def conflicting_variants(self, actions, pkg_vars):
                 """Given a set of actions, determine that none of the actions
                 have matching variant values for any variant."""
 
@@ -108,19 +110,23 @@
                 # The comparison is commutative.
                 for i in range(0, len(action_list)):
                         action = action_list[i]
-                        var = action.get_variants()
+                        var = action.get_variant_template()
                         # if we don't declare any variants on a given
                         # action, then it's automatically a conflict
                         if len(var) == 0:
                                 conflicts = True
+                        vc = variant.VariantCombinations(var, True)
                         for j in range(i + 1, len(action_list)):
                                 cmp_action = action_list[j]
-                                cmp_var = cmp_action.get_variants()
-                                if var.intersects(cmp_var):
-                                        intersection = var.intersection(cmp_var)
-                                        for k in intersection:
-                                                if len(var[k]) != 0:
-                                                        conflicts = True
+                                cmp_var = variant.VariantCombinations(
+                                    cmp_action.get_variant_template(), True)
+                                if vc.intersects(cmp_var):
+                                        intersection = vc.intersection(cmp_var)
+                                        intersection.simplify(pkg_vars,
+                                            assert_on_different_domains=False)
+                                        conflicts = True
+                                        for k in intersection.sat_set:
+                                                if len(k) != 0:
                                                         conflict_vars.add(k)
                 return conflicts, conflict_vars
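
The reworked conflicting_variants() above reduces "do two actions collide?" to whether their sets of variant combinations intersect once both are expanded against the package's variants. A toy illustration with plain sets rather than the pkg.variant classes:

a = set([frozenset([("variant.arch", "i386")]),
    frozenset([("variant.arch", "sparc")])])
b = set([frozenset([("variant.arch", "i386")])])
overlap = a & b
print(bool(overlap))                         # True: both apply for i386
print(sorted(sorted(c) for c in overlap))    # [[('variant.arch', 'i386')]]
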
 
--- a/src/modules/lint/pkglint_action.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/modules/lint/pkglint_action.py	Tue Sep 28 15:55:54 2010 -0700
@@ -121,14 +121,9 @@
                                     action.attrs["pkg.linted"].lower() == "true":
                                         continue
 
-                                variants = action.get_variants()
+                                variants = action.get_variant_template()
                                 variants.merge_unknown(pkg_vars)
-                                for v in variants:
-                                        # we're only interested in variants with
-                                        # 1 value, so using pop() is safe.
-                                        if len(variants[v]) == 1:
-                                                action.attrs[v] = \
-                                                    variants[v].pop()
+                                action.attrs.update(variants)
 
                                 p = action.attrs[attr]
                                 if p not in dic:
@@ -203,7 +198,8 @@
                 """Checks for duplicate paths on non-ref-counted actions."""
 
                 self.dup_attr_check(["file", "license"], "path", self.ref_paths,
-                    self.processed_paths, action, engine, msgid=pkglint_id)
+                    self.processed_paths, action, engine,
+                    manifest.get_all_variants(), msgid=pkglint_id)
 
         duplicate_paths.pkglint_desc = _(
             "Paths should be unique.")
@@ -212,7 +208,8 @@
                 """Checks for duplicate driver names."""
 
                 self.dup_attr_check(["driver"], "name", self.ref_drivers,
-                    self.processed_drivers, action, engine, msgid=pkglint_id)
+                    self.processed_drivers, action, engine,
+                    manifest.get_all_variants(), msgid=pkglint_id)
 
         duplicate_drivers.pkglint_desc = _("Driver names should be unique.")
 
@@ -221,7 +218,8 @@
                 """Checks for duplicate user names."""
 
                 self.dup_attr_check(["user"], "username", self.ref_usernames,
-                    self.processed_usernames, action, engine, msgid=pkglint_id)
+                    self.processed_usernames, action, engine,
+                    manifest.get_all_variants(), msgid=pkglint_id)
 
         duplicate_usernames.pkglint_desc = _("User names should be unique.")
 
@@ -229,7 +227,8 @@
                 """Checks for duplicate uids."""
 
                 self.dup_attr_check(["user"], "uid", self.ref_uids,
-                    self.processed_uids, action, engine, msgid=pkglint_id)
+                    self.processed_uids, action, engine,
+                    manifest.get_all_variants(), msgid=pkglint_id)
 
         duplicate_uids.pkglint_desc = _("UIDs should be unique.")
 
@@ -238,7 +237,8 @@
                 """Checks for duplicate group names."""
 
                 self.dup_attr_check(["group"], "groupname", self.ref_groupnames,
-                    self.processed_groupnames, action, engine, msgid=pkglint_id)
+                    self.processed_groupnames, action, engine,
+                    manifest.get_all_variants(), msgid=pkglint_id)
 
         duplicate_groupnames.pkglint_desc = _(
             "Group names should be unique.")
@@ -247,7 +247,8 @@
                 """Checks for duplicate gids."""
 
                 self.dup_attr_check(["group"], "name", self.ref_gids,
-                    self.processed_gids, action, engine, msgid=pkglint_id)
+                    self.processed_gids, action, engine,
+                    manifest.get_all_variants(), msgid=pkglint_id)
 
         duplicate_gids.pkglint_desc = _("GIDs should be unique.")
 
@@ -274,20 +275,18 @@
                 for (pfmri, a) in self.ref_paths[p]:
                         fmris.add(pfmri)
                         for key in a.differences(target):
-
+                                # target, used in link actions often differs
+                                # between variants of those actions.
+                                if key.startswith("variant") or \
+                                    key.startswith("facet") or \
+                                    key.startswith("target"):
+                                        continue
                                 conflicting_vars, variants = \
-                                    self.conflicting_variants([a, target])
+                                    self.conflicting_variants([a, target],
+                                        manifest.get_all_variants())
                                 if not conflicting_vars:
                                         continue
-
-                                if (key.startswith("variant") or \
-                                    key.startswith("facet")):
-                                        pass
-
-                                # target, used in link actions often differs
-                                # between variants of those actions.
-                                elif not key.startswith("target"):
-                                        differences.add(key)
+                                differences.add(key)
                 suspects = []
                 if differences:
                         for key in differences:
@@ -320,7 +319,7 @@
             "Duplicated reference counted actions should have the same attrs.")
 
         def dup_attr_check(self, action_names, attr_name, ref_dic,
-            processed_dic, action, engine, msgid=""):
+            processed_dic, action, engine, pkg_vars, msgid=""):
                 """This method does generic duplicate action checking where
                 we know the type of action and name of an action attributes
                 across actions/manifests that should not be duplicated.
@@ -362,28 +361,34 @@
                         actions.add(a)
                         fmris.add(pfmri)
 
-                has_conflict, conflict_vars = self.conflicting_variants(actions)
+                has_conflict, conflict_vars = self.conflicting_variants(actions,
+                    pkg_vars)
                 if has_conflict:
                         if not conflict_vars:
                                 engine.error(_("%(attr_name)s %(name)s is "
                                     "a duplicate delivered by %(pkgs)s "
-                                    "declaring no variants") %
+                                    "under all variant combinations") %
                                     {"attr_name": attr_name,
                                     "name": name,
                                     "pkgs":
                                     " ".join([f.get_fmri() for f in fmris])},
                                     msgid="%s%s.1" % (self.name, msgid))
                         else:
-                                engine.error(_("%(attr_name)s %(name)s is "
-                                    "a duplicate delivered by %(pkgs)s "
-                                    "declaring overlapping variants %(vars)s") %
-                                    {"attr_name": attr_name,
-                                    "name": name,
-                                    "pkgs":
-                                    " ".join([f.get_fmri() for f in fmris]),
-                                    "vars":
-                                    " ".join([v for v in conflict_vars])},
-                                    msgid="%s%s.2" % (self.name, msgid))
+                                for fz in conflict_vars:
+                                        engine.error(_("%(attr_name)s %(name)s "
+                                            "is a duplicate delivered by "
+                                            "%(pkgs)s declaring overlapping "
+                                            "variants %(vars)s") %
+                                            {"attr_name": attr_name,
+                                            "name": name,
+                                            "pkgs":
+                                            " ".join([f.get_fmri() for f
+                                                in fmris]),
+                                            "vars":
+                                            " ".join(["%s=%s" % (k, v)
+                                                for (k, v)
+                                                in sorted(fz)])},
+                                            msgid="%s%s.2" % (self.name, msgid))
                 processed_dic[name] = True
 
         def duplicate_path_types(self, action, manifest, engine,
@@ -408,7 +413,8 @@
                         fmris.add(pfmri)
                 if len(types) > 1:
                         has_conflict, conflict_vars = \
-                            self.conflicting_variants(actions)
+                            self.conflicting_variants(actions,
+                                manifest.get_all_variants())
                         if has_conflict:
                                 engine.error(
                                     _("path %(path)s is delivered by multiple "
--- a/src/modules/lint/pkglint_manifest.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/modules/lint/pkglint_manifest.py	Tue Sep 28 15:55:54 2010 -0700
@@ -147,6 +147,8 @@
                 undefined_variants = set()
                 has_arch_file = False
 
+                pkg_vars = manifest.get_all_variants()
+
                 for action in manifest.gen_actions():
                         if linted(action):
                                 continue
@@ -155,17 +157,13 @@
                             "elfarch" in action.attrs:
                                 has_arch_file = True
 
-                        for key in action.attrs:
-                                if not key.startswith("variant"):
-                                        continue
-                                val = action.attrs[key]
-                                if key not in manifest:
-                                        undefined_variants.add(key)
-                                else:
-                                        descr = manifest[key]
-                                        if val not in descr:
-                                                unknown_variants.add(
-                                                    "%s=%s" % (key, val))
+                        vct = action.get_variant_template()
+                        diff = vct.difference(pkg_vars)
+                        for k in diff.type_diffs:
+                                undefined_variants.add(k)
+                        for k, v in diff.value_diffs:
+                                unknown_variants.add("%s=%s" % (k, v))
+
                 if len(undefined_variants) > 0:
                         engine.error(_("variant(s) %(vars)s not defined by "
                             "%(pkg)s") %
@@ -243,7 +241,8 @@
                         actions = seen_deps[key]
                         if len(actions) > 1:
                                 has_conflict, conflict_vars = \
-                                    self.conflicting_variants(actions)
+                                    self.conflicting_variants(actions,
+                                        manifest.get_all_variants())
                                 if has_conflict:
                                         duplicates.append(key)
 
@@ -273,7 +272,8 @@
                         actions = seen_sets[key]
                         if len(actions) > 1:
                                 has_conflict, conflict_vars = \
-                                    self.conflicting_variants(actions)
+                                    self.conflicting_variants(actions,
+                                        manifest.get_all_variants())
                                 if has_conflict:
                                         duplicates.append(key)
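
The undefined/unknown variant checks above lean on VariantCombinationTemplate.difference(), which reports variant types missing from the package separately from values the package does not declare. A standalone sketch of that difference, assuming plain dicts of sets in place of the template class:

from collections import namedtuple

VCTDifference = namedtuple("VCTDifference", ["type_diffs", "value_diffs"])

def difference(act_vars, pkg_vars):
        """Report variant types and values in act_vars but not in pkg_vars."""
        res = VCTDifference([], [])
        for k in act_vars:
                if k not in pkg_vars:
                        res.type_diffs.append(k)
                else:
                        for v in act_vars[k] - pkg_vars[k]:
                                res.value_diffs.append((k, v))
        return res

diff = difference(
    {"variant.arch": set(["i386"]), "variant.foo": set(["bar"])},
    {"variant.arch": set(["sparc"])})
print(diff.type_diffs)    # ['variant.foo']
print(diff.value_diffs)   # [('variant.arch', 'i386')]
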
 
--- a/src/modules/manifest.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/modules/manifest.py	Tue Sep 28 15:55:54 2010 -0700
@@ -619,7 +619,7 @@
 
         def get_all_variants(self):
                 """Return a dictionary mapping variant tags to their values."""
-                return variant.VariantSets(dict((
+                return variant.VariantCombinationTemplate(dict((
                     (name, self.attributes[name])
                     for name in self.attributes
                     if name.startswith("variant.")
--- a/src/modules/publish/dependencies.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/modules/publish/dependencies.py	Tue Sep 28 15:55:54 2010 -0700
@@ -24,6 +24,7 @@
 # Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
 #
 
+import copy
 import itertools
 import os
 import urllib
@@ -42,6 +43,7 @@
 
 paths_prefix = "%s.path" % base.Dependency.DEPEND_DEBUG_PREFIX
 files_prefix = "%s.file" % base.Dependency.DEPEND_DEBUG_PREFIX
+reason_prefix = "%s.reason" % base.Dependency.DEPEND_DEBUG_PREFIX
 
 class DependencyError(Exception):
         """The parent class for all dependency exceptions."""
@@ -56,9 +58,12 @@
                 self.source = source
 
         def __str__(self):
-                return _("The file dependency %s has paths which resolve "
-                    "to multiple packages. The actions are as follows:\n%s" %
-                    (self.source, "\n".join(["\t%s" % a for a in self.res])))
+                return _("The file dependency %(src)s has paths which resolve "
+                    "to multiple packages. The actions are as "
+                    "follows:\n%(acts)s") % {
+                        "src":self.source,
+                        "acts":"\n".join(["\t%s" % a for a in self.res])
+                    }
 
 class AmbiguousPathError(DependencyError):
         """This exception is used when multiple packages deliver a path which
@@ -69,9 +74,12 @@
                 self.source = source
 
         def __str__(self):
-                return _("The file dependency %s depends on a path delivered "
-                    "by multiple packages. Those packages are:%s" %
-                    (self.source, " ".join([str(p) for p in self.pkgs])))
+                return _("The file dependency %(src)s depends on a path "
+                    "delivered by multiple packages. Those packages "
+                    "are:%(pkgs)s") % {
+                        "src":self.source,
+                        "pkgs":" ".join([str(p) for p in self.pkgs])
+                    }
 
 class UnresolvedDependencyError(DependencyError):
         """This exception is used when no package delivers a file which is
@@ -83,16 +91,40 @@
                 self.pvars = pvars
 
         def __str__(self):
-                return _("%s has unresolved dependency '%s' under the "
-                    "following combinations of variants:\n%s") % \
-                    (self.path, self.file_dep,
-                    "\n".join([
-                        " ".join([("%s:%s" % (name, val)) for name, val in grp])
-                        for grp in self.pvars.get_unsatisfied().groups()
-                    ]))
+                return _("%(pth)s has unresolved dependency '%(dep)s' under "
+                    "the following combinations of variants:\n%(combo)s") % \
+                    {
+                        "pth":self.path,
+                        "dep":self.file_dep,
+                        "combo":"\n".join([
+                            " ".join([
+                                ("%s:%s" % (name, val)) for name, val in grp
+                            ])
+                            for grp in self.pvars.not_sat_set
+                    ])}
+
+class MissingPackageVariantError(DependencyError):
+        """This exception is used when an action is tagged with a variant or
+        variant value which the package is not tagged with."""
+
+        def __init__(self, act_vars, pkg_vars, pth):
+                self.act_vars = act_vars
+                self.pkg_vars = pkg_vars
+                self.path = pth
+
+        def __str__(self):
+                return _("The action delivering %(path)s is tagged with a "
+                    "variant type or value not tagged on the package. "
+                    "Dependencies on this file may fail to be reported.\n"
+                    "The action's variants are: %(act)s\nThe package's "
+                    "variants are: %(pkg)s") % {
+                        "path": self.path,
+                        "act": self.act_vars,
+                        "pkg": self.pkg_vars
+                    }
 
 def list_implicit_deps(file_path, proto_dirs, dyn_tok_conv, kernel_paths,
-    remove_internal_deps=True):
+    remove_internal_deps=True, convert=True):
         """Given the manifest provided in file_path, use the known dependency
         generators to produce a list of dependencies the files delivered by
         the manifest have.
@@ -106,15 +138,38 @@
         $PLATFORM, to the values they should be expanded to.
 
         'kernel_paths' contains the run paths which kernel modules should use.
-        """
+
+        'convert' determines whether PublishingDependencies will be transformed
+        to DependencyActions prior to being returned.  This is primarily an
+        option to facilitate testing and debugging."""
 
         m, missing_manf_files = __make_manifest(file_path, proto_dirs)
         pkg_vars = m.get_all_variants()
         deps, elist, missing, pkg_attrs = list_implicit_deps_for_manifest(m,
             proto_dirs, pkg_vars, dyn_tok_conv, kernel_paths)
+        rid_errs = []
         if remove_internal_deps:
-                deps = resolve_internal_deps(deps, m, proto_dirs, pkg_vars)
-        return deps, missing_manf_files + elist, missing, pkg_attrs
+                deps, rid_errs = resolve_internal_deps(deps, m, proto_dirs,
+                    pkg_vars)
+        if convert:
+                deps = convert_to_standard_dep_actions(deps)
+        return deps, missing_manf_files + elist + rid_errs, missing, pkg_attrs
+
+def convert_to_standard_dep_actions(deps):
+        """Convert pkg.base.Dependency objects to
+        pkg.actions.dependency.Dependency objects."""
+
+        res = []
+        for d in deps:
+                tmp = []
+                for c in d.dep_vars.not_sat_set:
+                        attrs = d.attrs.copy()
+                        attrs.update(c)
+                        tmp.append(actions.depend.DependencyAction(**attrs))
+                if not tmp:
+                        tmp.append(actions.depend.DependencyAction(**d.attrs))
+                res.extend(tmp)
+        return res
 
 def resolve_internal_deps(deps, mfst, proto_dirs, pkg_vars):
         """Given a list of dependencies, remove those which are satisfied by
@@ -131,34 +186,44 @@
         'pkg_vars' are the variants that this package was published against."""
 
         res = []
+        errs = []
         delivered = {}
         delivered_bn = {}
         for a in mfst.gen_actions_by_type("file"):
-                pvars = variants.VariantSets(a.get_variants())
+                pvars = a.get_variant_template()
                 if not pvars:
                         pvars = pkg_vars
                 else:
+                        if not pvars.issubset(pkg_vars):
+                                # This happens when an action in a package is
+                                # tagged with a variant type or value which the
+                                # package has not been tagged with.
+                                errs.append(
+                                    MissingPackageVariantError(pvars, pkg_vars,
+                                        a.attrs["path"]))
                         pvars.merge_unknown(pkg_vars)
+                pvc = variants.VariantCombinations(pvars, satisfied=True)
                 p = a.attrs["path"]
-                delivered.setdefault(p, variants.VariantSets()).merge(pvars)
+                delivered.setdefault(p, copy.copy(pvc))
                 p = os.path.join(a.attrs[portable.PD_PROTO_DIR], p)
                 np = os.path.normpath(p)
                 rp = os.path.realpath(p)
                 # adding the normalized path
-                delivered.setdefault(np, variants.VariantSets()).merge(pvars)
+                delivered.setdefault(np, copy.copy(pvc))
                 # adding the real path
-                delivered.setdefault(rp, variants.VariantSets()).merge(pvars)
+                delivered.setdefault(rp, copy.copy(pvc))
                 bn = os.path.basename(p)
-                delivered_bn.setdefault(bn, variants.VariantSets()).merge(pvars)
+                delivered_bn.setdefault(bn, copy.copy(pvc))
 
         for d in deps:
                 etype, pvars = d.resolve_internal(delivered_files=delivered,
                     delivered_base_names=delivered_bn)
                 if etype is None:
                         continue
+                pvars.simplify(pkg_vars)
                 d.dep_vars = pvars
                 res.append(d)
-        return res
+        return res, errs
 
 def no_such_file(action, **kwargs):
         """Function to handle dispatch of files not found on the system."""
@@ -229,7 +294,6 @@
                                 deps.extend(ds)
                                 elist.extend(errs)
                                 __update_pkg_attrs(pkg_attrs, attrs)
-
                         except base.DependencyAnalysisError, e:
                                 elist.append(e)
         for a in mfst.gen_actions_by_type("hardlink"):
@@ -323,20 +387,22 @@
         for pfmri, delivered_vars in lst:
                 # If the pfmri package isn't present under any of the variants
                 # where the dependency is, skip it.
-                if not orig_dep_vars.intersects(delivered_vars):
+                if not orig_dep_vars.intersects(delivered_vars,
+                    only_not_sat=True):
                         continue
+                vc = orig_dep_vars.intersection(delivered_vars)
+                vc.mark_all_as_satisfied()
                 for found_vars, found_fmri in vars:
                         # Because we don't have the concept of one-of
                         # dependencies, depending on a file which is delivered
                         # in multiple packages under a set of variants
                         # prevents automatic resolution of dependencies.
                         if found_fmri != pfmri and \
-                            (delivered_vars.intersects(found_vars) or
-                            found_vars.intersects(delivered_vars)):
+                            found_vars.intersects(delivered_vars):
                                 errs.add(found_fmri)
                                 errs.add(pfmri)
                 # Find the variants under which pfmri is relevant.
-                action_vars = orig_dep_vars.intersection(delivered_vars)
+                action_vars = vc
                 # Mark the variants as satisfied so it's possible to know if
                 # all variant combinations have been covered.
                 dep_vars.mark_as_satisfied(delivered_vars)
@@ -381,7 +447,7 @@
         'orig_dep_vars' is the original set of variants under which the
         dependency must be satisfied."""
 
-        res = None
+        res = []
         errs = []
         multiple_path_errs = {}
         for p in make_paths(file_dep):
@@ -452,8 +518,7 @@
         'pkg_vars' is the variants against which the package was published."""
 
         file_dep, orig_dep_vars = split_off_variants(file_dep, pkg_vars)
-        dep_vars = orig_dep_vars.copy()
-
+        dep_vars = copy.copy(orig_dep_vars)
         # First try to resolve the dependency against the delivered files.
         res, dep_vars, errs = find_package_using_delivered_files(delivered,
                 file_dep, dep_vars, orig_dep_vars)
@@ -465,16 +530,17 @@
         #
         # We only need to resolve for the variants not already satisfied
         # above.
+        const_dep_vars = copy.copy(dep_vars)
         inst_res, dep_vars, inst_errs = find_package_using_delivered_files(
-            installed, file_dep, dep_vars, dep_vars.get_unsatisfied())
+            installed, file_dep, dep_vars, const_dep_vars)
         res.extend(inst_res)
         errs.extend(inst_errs)
         return res, dep_vars, errs
 
 def is_file_dependency(act):
-        return (act.name == "depend" and
-            act.attrs.get("fmri", None) == base.Dependency.DUMMY_FMRI
-            and files_prefix in act.attrs)
+        return act.name == "depend" and \
+            act.attrs.get("fmri", None) == base.Dependency.DUMMY_FMRI and \
+            files_prefix in act.attrs
 
 def merge_deps(dest, src):
         """Add the information contained in src's attrs to dest."""
@@ -511,18 +577,26 @@
                 so that the groups match the duplicate actions that the code
                 in pkg.manifest notices."""
 
-                # d[0] is the action.  d[1] is the VariantSet for this action.
+                # d[0] is the action.  d[1] is the VariantCombination for this
+                # action.
                 return d[0].name, d[0].attrs.get(d[0].key_attr, id(d[0]))
 
         def add_vars(d, d_vars, pkg_vars):
                 """Add the variants 'd_vars' to the dependency 'd', after
                 removing the variants matching those defined in 'pkg_vars'."""
 
-                d_vars.remove_identical(pkg_vars)
-                d.attrs.update(d_vars)
-                # Remove any duplicate values for any attributes.
-                d.consolidate_attrs()
-                return d
+                d_vars.simplify(pkg_vars)
+                res = []
+                for s in d_vars.sat_set:
+                        attrs = d.attrs.copy()
+                        attrs.update(s)
+                        t = actions.depend.DependencyAction(**attrs)
+                        t.consolidate_attrs()
+                        res.append(t)
+                if not res:
+                        d.consolidate_attrs()
+                        res = [d]
+                return res
 
         def key_on_variants(a):
                 """Return the key (the VariantSets) to sort the grouped tuples
@@ -535,9 +609,9 @@
                 supersets of others are placed at the front of the list.  This
                 function assumes that a and b are both VariantSets."""
 
-                if a.issubset(b):
+                if a.issubset(b, satisfied=True):
                         return 1
-                elif b.issubset(a):
+                elif b.issubset(a, satisfied=True):
                         return -1
                 return 0
 
@@ -630,11 +704,12 @@
                                 # If d_vars is a subset of any variant set
                                 # already in the results, then d should be
                                 # combined with that dependency.
-                                if d_vars.issubset(rel_vars):
+                                if d_vars.issubset(rel_vars, satisfied=True):
                                         found_subset = True
                                         merge_deps(rel_res, d)
                                         break
-                                assert(not rel_vars.issubset(d_vars))
+                                assert(not rel_vars.issubset(d_vars,
+                                    satisfied=True))
 
                         # If no subset was found, then d_vars is a new set of
                         # conditions under which the dependency d should apply
@@ -645,20 +720,20 @@
                 # Add the variants to the dependency action and remove any
                 # variants that are identical to those defined by the package.
                 subres = [add_vars(d, d_vars, pkg_vars) for d, d_vars in subres]
-                res.extend(subres)
+                res.extend(itertools.chain.from_iterable(subres))
         return res
 
 def split_off_variants(dep, pkg_vars):
         """Take a dependency which may be tagged with variants and move those
         tags into a VariantSet."""
 
-        dep_vars = variants.VariantSets(dep.get_variants())
+        dep_vars = dep.get_variant_template()
         dep_vars.merge_unknown(pkg_vars)
         # Since all variant information is being kept in the above VariantSets,
         # remove the variant information from the action.  This prevents
         # confusion about which is the authoritative source of information.
         dep.strip_variants()
-        return dep, dep_vars
+        return dep, variants.VariantCombinations(dep_vars, satisfied=False)
 
 def prune_debug_attrs(action):
         """Given a dependency action with pkg.debug.depend attributes
@@ -695,10 +770,12 @@
                 for f in itertools.chain(mfst.gen_actions_by_type("file"),
                      mfst.gen_actions_by_type("hardlink"),
                      mfst.gen_actions_by_type("link")):
-                        dep_vars = variants.VariantSets(f.get_variants())
+                        dep_vars = f.get_variant_template()
                         dep_vars.merge_unknown(pvariants)
+                        vc = variants.VariantCombinations(dep_vars,
+                            satisfied=True)
                         pathdict.setdefault(f.attrs["path"], []).append(
-                            (pfmri, dep_vars))
+                            (pfmri, vc))
 
 
         # The variable 'manifests' is a list of 5-tuples. The first element
@@ -755,8 +832,8 @@
                 ]
                 for file_dep, (res, dep_vars, pkg_errs) in pkg_res:
                         errs.extend(pkg_errs)
+                        dep_vars.simplify(pkg_vars)
                         if not res:
-                                dep_vars.merge_unknown(pkg_vars)
                                 errs.append(UnresolvedDependencyError(mp,
                                     file_dep, dep_vars))
                         else:
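
The new convert_to_standard_dep_actions() turns each generated dependency into one depend action per still-unsatisfied variant combination; a dependency with no unsatisfied combinations is emitted once, unchanged. A rough sketch with plain dicts standing in for pkg.actions.depend.DependencyAction (the function name convert is illustrative):

def convert(dep_attrs, not_sat_set):
        """Expand one dependency into one attrs dict per combination."""
        res = []
        for combo in not_sat_set:
                attrs = dep_attrs.copy()
                attrs.update(dict(combo))
                res.append(attrs)
        if not res:
                res.append(dep_attrs.copy())
        return res

dep = {"fmri": "__TBD", "type": "require",
    "pkg.debug.depend.file": "libc.so.1"}
for a in convert(dep, [frozenset([("variant.arch", "sparc")])]):
        print(a)
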
--- a/src/modules/variant.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/modules/variant.py	Tue Sep 28 15:55:54 2010 -0700
@@ -20,10 +20,16 @@
 # CDDL HEADER END
 #
 
+#
 # Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
+#
 
 # basic variant support
 
+import copy
+import itertools
+from collections import namedtuple
+
 from pkg.misc import EmptyI
 
 class Variants(dict):
@@ -91,61 +97,56 @@
                                 return False
                 return True
 
-class VariantSets(Variants):
-        """Class for holding sets of variants. The parent class is designed to
-        hold one value per variant. This class is used when multiple values for
-        a variant need to be used. It ensures that the value each variant
-        maps to is a set of one or more variant values."""
-
-        def __init__(self, init=EmptyI):
-                self.set_sats = False
-                self.not_sat_set = None
-                Variants.__init__(self, init)
+# The two classes which follow are used during dependency calculation when
+# actions have variants, or the packages they're contained in do.  The
+# VariantCombinationTemplate corresponds to information that is encoded in
+# the actions.  Specifically, it records what types of variants exist
+# (variant.arch or variant.debug) and what values are known to exist for them
+# (x86/sparc or debug/non-debug).  The variant types are the keys of the
+# dictionary while the variant values are what the keys map to.
+#
+# The VariantCombinations class serves a different purpose.  In order to
+# determine whether a dependency is satisfied under all combinations of
+# variants, it is necessary to track whether each combination has been
+# satisfied.  When a VariantCombinations is created, it is provided a
+# VariantCombinationTemplate which it uses to seed the combinations of variants.
+# To make a single combination instance, for each type of variant, it chooses
+# one value and adds it to the instance.  It creates all possible combination
+# instances and these are what it uses to track whether all combinations have
+# been satisfied.  The class also provides methods for manipulating the
+# instances while maintaining consistency between the satisfied set and the
+# unsatisfied set.
 
-        def update(self, d):
-                for a in d:
-                        if isinstance(d[a], set):
-                                self[a] = d[a]
-                        elif isinstance(d[a], list):
-                                self[a] = set(d[a])
-                        else:
-                                self[a] = set([d[a]])
+class VariantCombinationTemplate(Variants):
+        """Class for holding a template of variant types and their potential
+        values."""
 
-        def copy(self):
-                return VariantSets(self)
+        def __copy__(self):
+                return VariantCombinationTemplate(self)
 
         def __setitem__(self, item, value):
-                assert(not self.set_sats)
+                """Overrides Variants.__setitem__ to ensure that all values are
+                sets."""
+
                 if isinstance(value, list):
                         value = set(value)
                 elif not isinstance(value, set):
                         value = set([value])
                 Variants.__setitem__(self, item, value)
 
-        def merge(self, var):
-                """Combine two sets of variants into one."""
-                for name in var:
-                        if name in self:
-                                self[name].update(var[name])
-                        else:
-                                self[name] = var[name]
-
         def issubset(self, var):
                 """Returns whether self is a subset of variant var."""
+                res = self.difference(var)
+                return not res.type_diffs and not res.value_diffs
+
+        def difference(self, var):
+                res = VCTDifference([], [])
                 for k in self:
                         if k not in var:
-                                return False
-                        if self[k] - var[k]:
-                                return False
-                return True
-
-        def difference(self, var):
-                """Returns the variants in self and not in var."""
-                res = VariantSets()
-                for k in self:
-                        tmp = self[k] - var.get(k, set())
-                        if tmp:
-                                res[k] = tmp
+                                res.type_diffs.append(k)
+                        else:
+                                for v in self[k] - var[k]:
+                                        res.value_diffs.append((k, v))
                 return res
 
         def merge_unknown(self, var):
@@ -154,112 +155,218 @@
                         if name not in self:
                                 self[name] = var[name]
 
-        def intersects(self, var):
-                """Returns whether self and var share at least one value for
-                each variant in self."""
-                for k in self:
-                        if k not in var:
-                                return False
-                        found = False
-                        for v in self[k]:
-                                if v in var[k]:
-                                        found = True
-                                        break
-                        if not found:
-                                return False
-                return True
+        def __repr__(self):
+                return "VariantTemplate(%s)" % dict.__repr__(self)
+
+        def __str__(self):
+                s = ""
+                for k in sorted(self):
+                        t = ",".join(['"%s"' % v for v in sorted(self[k])])
+                        s += " %s=%s" % (k, t)
+                if s:
+                        return s
+                else:
+                        return " <none>"
+
+
+VCTDifference = namedtuple("VCTDifference", ["type_diffs", "value_diffs"])
+# Namedtuple used to store the results of VariantCombinationTemplate
+# differences.  The type_diffs field stores the variant types which are in the
+# caller and not in the argument to difference.  The value_diffs field stores
+# the values for particular types which are in the caller and not in the
+# argument to difference.
+
+
+class VariantCombinations(object):
+        """Class for keeping track of which combinations of variant values have
+        and have not been satisfied for a particular action."""
+
+        def __init__(self, vct, satisfied):
+                """Create an instance of VariantCombinations based on the
+                template provided.
+
+                The 'vct' parameter is the template from which to build the
+                combinations.
+
+                The 'satisfied' parameter is a boolean which determines whether
+                the combinations created from the template will be considered
+                satisfied or unsatisfied."""
 
-        def intersection(self, var):
+                assert(isinstance(vct, VariantCombinationTemplate))
+                self.__sat_set = set()
+                self.__not_sat_set = set()
+                tmp = []
+                # This builds all combinations of variant values presented in
+                # vct.
+                for k in sorted(vct):
+                        if not tmp:
+                                # Initialize tmp with the key-value pairs for
+                                # the first key in vct.
+                                tmp = [[(k, v)] for v in vct[k]]
+                                continue
+                        # For each subsequent key in vct, append each of its
+                        # key-value pairs to each of the existing combinations.
+                        new_tmp = [
+                            exist[:] + [(k, v)] for v in vct[k]
+                            for exist in tmp
+                        ]
+                        tmp = new_tmp
+                # Here is an example of how the loop above would handle a vct
+                # of { 1:["a", "b"], 2:["x", "y"], 3:["m", "n"] }
+                # First, tmp would be initialized as [[(1, "a")], [(1, "b")]]
+                # Next, a new list is created by adding (2, "x") to a copy
+                # of each item in tmp, and then (2, "y"). This produces
+                # [[(1, "a"), (2, "x")], [(1, "a"), (2, "y")],
+                #  [(1, "b"), (2, "x")], [(1, "b"), (2, "y")]]
+                # That process is repeated one more time for the 3 key,
+                # resulting in:
+                # [[(1, "a"), (2, "x"), (3, "m")],
+                #  [(1, "a"), (2, "x"), (3, "n")],
+                #  [(1, "a"), (2, "y"), (3, "m")],
+                #  [(1, "a"), (2, "y"), (3, "n")],
+                #  [(1, "b"), (2, "x"), (3, "m")],
+                #  [(1, "b"), (2, "x"), (3, "n")],
+                #  [(1, "b"), (2, "y"), (3, "m")],
+                #  [(1, "b"), (2, "y"), (3, "n")]]
+                res = set()
+                for lst in tmp:
+                        res.add(frozenset(lst))
+                if satisfied:
+                        self.__sat_set = res
+                else:
+                        self.__not_sat_set = res
+                self.__template = copy.copy(vct)
+                self.__simpl_template = None
+
+        @property
+        def template(self):
+                return self.__template
+
+        @property
+        def sat_set(self):
+                if not self.__simpl_template:
+                        return self.__sat_set
+                else:
+                        return self.__calc_simple(True)
+
+        @property
+        def not_sat_set(self):
+                if not self.__simpl_template:
+                        return self.__not_sat_set
+                else:
+                        return self.__calc_simple(False)
+
+        def __copy__(self):
+                vc = VariantCombinations(self.__template, True)
+                vc.__sat_set = copy.copy(self.__sat_set)
+                vc.__not_sat_set = copy.copy(self.__not_sat_set)
+                vc.__simpl_template = self.__simpl_template
+                return vc
+                
+        def is_empty(self):
+                """Returns whether self was created with any potential variant
+                values."""
+
+                return not self.__sat_set and not self.__not_sat_set
+                
+        def issubset(self, vc, satisfied):
+                """Returns whether the instances in self are a subset of the
+                instances in vc. 'satisfied' determines whether the instances
+                compared are drawn from the set of satisfied instances or the
+                set of unsatisfied instances."""
+
+                if satisfied:
+                        return self.__sat_set.issubset(vc.__sat_set)
+                else:
+                        return self.__not_sat_set.issubset(vc.__not_sat_set)
+
+        def intersects(self, vc, only_not_sat=False):
+                """Returns whether an action whose variants are vc could satisfy
+                dependencies whose variants are self.
+
+                'only_not_sat' determines whether only the unsatisfied set of
+                variants for self is used for comparison.  When only_not_sat
+                is True, then intersects returns whether vc would satisfy at
+                least one instance which is currently unsatisfied."""
+
+                if self.is_empty() or vc.is_empty():
+                        return True
+                tmp = self.intersection(vc)
+                if only_not_sat:
+                        return bool(tmp.__not_sat_set)
+                return not tmp.is_empty()
+                
+        def intersection(self, vc):
                 """Find those variant values in self that are also in var, and
                 return them."""
-
-                res = VariantSets()
-                for k in self:
-                        if k not in var:
-                                raise RuntimeError("%s cannot be intersected "
-                                    "with %s becuase %s is not a key in the "
-                                    "latter." % (self, var, k))
-                        res[k] = self[k] & var[k]
+                assert len(vc.not_sat_set) == 0
+                res = copy.copy(self)
+                res.__sat_set &= vc.__sat_set
+                res.__not_sat_set &= vc.__sat_set
                 return res
 
-        def __variant_cross_product(self):
-                """Generates the cross product of all the values for all the
-                variants in self."""
+        def mark_as_satisfied(self, vc):
+                """For all instances in vc, mark those instances as being
+                satisfied."""
 
-                tmp = []
-                for k in sorted(self):
-                        if tmp == []:
-                                tmp = [[v] for v in self[k]]
-                                continue
-                        new_tmp = []
-                        new_tmp.extend([
-                            exist[:] + [v] for v in self[k]
-                            for exist in tmp
-                        ])
-                        tmp = new_tmp
-                return set([tuple(v) for v in tmp])
+                for s in vc.__sat_set:
+                        if s in self.__not_sat_set:
+                                self.__not_sat_set.remove(s)
+                                self.__sat_set.add(s)
 
-        def mark_as_satisfied(self, var):
-                """Mark those variant combinations seen in var as being
-                satisfied in self."""
+        def mark_all_as_satisfied(self):
+                """Mark all instances as being satisfied."""
 
-                if not self.set_sats:
-                        self.set_sats = True
-                        self.not_sat_set = self.__variant_cross_product()
-                self.not_sat_set -= var.__variant_cross_product()
+                self.__sat_set |= self.__not_sat_set
+                self.__not_sat_set = set()
+                
 
         def is_satisfied(self):
                 """Returns whether all variant combinations for this package
                 have been satisfied."""
 
-                return (self.set_sats and not self.not_sat_set) or not self
+                return not self.__not_sat_set
 
-        def groups(self):
-                """Return a grouped list of the variant combinations in this
-                VariantSets object"""
+        def simplify(self, vct, assert_on_different_domains=True):
+                """Store the provided VariantCombinationTemplate as the template
+                to use when simplifying the combinations."""
 
-                var_names = sorted(self)
-                return [zip(var_names, tup)
-                    for tup in sorted(self.__variant_cross_product())]
+                if not self.__template.issubset(vct):
+                        self.__simpl_template = {}                
+                        if assert_on_different_domains:
+                                assert self.__template.issubset(vct), \
+                                    "template:%s\nvct:%s" % \
+                                    (self.__template, vct)
+                self.__simpl_template = vct
 
-        def get_satisfied(self):
-                """Returns the combinations of variants which have been
-                satisfied for this VariantSets."""
-
-                if self == {} or not self.set_sats:
-                        return VariantSets()
+        def __calc_simple(self, sat):
+                """Given VariantCombinationTemplate to be simplified against,
+                reduce the instances to the empty set if the instances cover all
+                possible combinations of the template provided.
 
-                var_names = sorted(self)
-                satisfied = self.__variant_cross_product() - self.not_sat_set
-                f = {}
-                for tup in sorted(satisfied):
-                        for key, var in zip(var_names, tup):
-                                f.setdefault(key, set()).add(var)
-                return VariantSets(f)
+                A general approach to simplification is currently deemed too
+                difficult in the face of arbitrary numbers of variant types and
+                arbitrary numbers of variant values."""
 
-        def get_unsatisfied(self):
-                """Returns the variant combinations for self which have not
-                been satisfied."""
-                if not self.set_sats:
-                        self.set_sats = True
-                        self.not_sat_set = self.__variant_cross_product()
+                if not self.__simpl_template:
+                        possibilities = 0
+                else:
+                        possibilities = 1
+                        for k in self.__simpl_template:
+                                possibilities *= len(self.__simpl_template[k])
 
-                var_names = sorted(self)
-                f = {}
-                for tup in sorted(self.not_sat_set):
-                        for key, var in zip(var_names, tup):
-                                f.setdefault(key, set()).add(var)
-                return VariantSets(f)
+                if sat:
+                        rel_set = self.__sat_set
+                else:
+                        rel_set = self.__not_sat_set
 
-        def remove_identical(self, var):
-                """For each key in self, remove it from the dictionary if its
-                values are identical to the values that var maps k to."""
-
-                for k in self.keys():
-                        if k not in var:
-                                continue
-                        if self[k] == var[k]:
-                                del self[k]
+                # If the size of sat_set or not_sat_set matches the number of
+                # possibilities the template can produce, then that set can be
+                # simplified away to the empty set.
+                if possibilities != len(rel_set):
+                        return rel_set
+                return set()
 
         def __repr__(self):
-                return "VariantSets(%s)" % dict.__repr__(self)
+                return "VC Sat:%s Unsat:%s" % (sorted(self.sat_set),
+                    sorted(self.not_sat_set))
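
For readers following the new VariantCombinations class above: each concrete
combination of variant values is represented as a frozenset of (name, value)
pairs drawn from the template's cross product, combinations move from
not_sat_set to sat_set as they are satisfied, and __calc_simple's counting
trick collapses a set to the empty set once it covers every combination its
template can produce.  The sketch below (hypothetical names only, not the
pkg.variant API) illustrates that bookkeeping and mirrors the expectations
added to t_variant.py later in this changeset.

# Illustrative toy model; the real implementation lives in pkg.variant.
import itertools

def cross_product(template):
        # template maps a variant name to the collection of values it can
        # take, e.g. {"variant.arch": ["sparc", "i386"]}.
        names = sorted(template)
        return set(
            frozenset(zip(names, combo))
            for combo in itertools.product(
                *[sorted(template[n]) for n in names]))

class ToyCombinations(object):
        def __init__(self, template, satisfied):
                combos = cross_product(template)
                self.sat_set = combos if satisfied else set()
                self.not_sat_set = set() if satisfied else combos

        def mark_as_satisfied(self, other):
                # Move any of our unsatisfied combinations that the other
                # instance has satisfied into our satisfied set.
                moved = self.not_sat_set & other.sat_set
                self.not_sat_set -= moved
                self.sat_set |= moved

        def simplify(self, template):
                # The counting trick: a set that covers every combination the
                # template can produce carries no information, so collapse it
                # to the empty set.
                possibilities = 1
                for name in template:
                        possibilities *= len(template[name])
                if len(self.sat_set) == possibilities:
                        self.sat_set = set()
                if len(self.not_sat_set) == possibilities:
                        self.not_sat_set = set()

# Two variants with three values each yield nine combinations; a fully
# satisfied instance simplifies to empty sets, as the new t_variant.py test
# expects of vc1_s.
tmpl = {1: ["a", "b", "c"], 2: ["x", "y", "z"]}
vc = ToyCombinations(tmpl, True)
assert len(vc.sat_set) == 9
vc.simplify(tmpl)
assert vc.sat_set == set() and vc.not_sat_set == set()
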
--- a/src/pkgdep.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/pkgdep.py	Tue Sep 28 15:55:54 2010 -0700
@@ -307,17 +307,6 @@
                 return not act.name == "depend" or \
                     act.attrs["type"] != "require"
 
-def explode(dep_with_variantsets):
-        sat_tups = dep_with_variantsets.get_variants().get_satisfied().groups()
-        if not sat_tups:
-                return dep_with_variantsets
-        res = []
-        for tup in sat_tups:
-                attrs = dep_with_variantsets.attrs.copy()
-                attrs.update(dict(tup))
-                res.append(str(actions.depend.DependencyAction(**attrs)))
-        return "\n".join(res).rstrip()
-
 def pkgdeps_to_screen(pkg_deps, manifest_paths, echo_manifest):
         """Write the resolved package dependencies to stdout.
 
@@ -345,7 +334,7 @@
                                     p)
                                 ret_code = 1
                 for d in pkg_deps[p]:
-                        msg(explode(d))
+                        msg(d)
                 msg(_("\n\n"))
         return ret_code
 
@@ -383,7 +372,7 @@
                                 out_fh.write(l)
                 fh.close()
         for d in deps:
-                out_fh.write("%s\n" % explode(d))
+                out_fh.write("%s\n" % d)
         out_fh.close()
         return ret_code
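
The explode() helper removed above is no longer needed: with the new
VariantCombinations machinery, each resolved depend action already carries
the concrete variant tags that apply to it (see the updated sep_vars
expectations in t_pkgdep.py below), so the callers simply print each action.
A hypothetical illustration of the output shape, not the pkgdepend code
itself:

# One depend action per satisfied variant combination (made-up data).
base = "depend fmri=pkg:/sep_vars pkg.debug.depend.type=hardlink type=require"
combos = [
    {"variant.foo": "bar", "variant.num": "one"},
    {"variant.foo": "bar", "variant.num": "two"},
]
for combo in combos:
        print(base + "".join(
            " %s=%s" % (k, v) for k, v in sorted(combo.items())))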
 
--- a/src/tests/api/t_dependencies.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/tests/api/t_dependencies.py	Tue Sep 28 15:55:54 2010 -0700
@@ -745,10 +745,10 @@
                                             d.action.attrs["path"], "baz")
                 t_path = self.make_manifest(self.ext_hardlink_manf)
                 _check_results(dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, []))
+                    [self.proto_dir], {}, [], convert=False))
                 _check_results(dependencies.list_implicit_deps(t_path,
                     [self.proto_dir], {}, [],
-                    remove_internal_deps=False))
+                    remove_internal_deps=False, convert=False))
 
         def test_int_hardlink(self):
                 """Check that a hardlink with a target inside the package is
@@ -759,7 +759,7 @@
                 self.make_proto_text_file(self.paths["syslog_path"])
                 ds, es, ms, pkg_attrs = \
                     dependencies.list_implicit_deps(t_path, [self.proto_dir],
-                        {}, [])
+                        {}, [], convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -768,7 +768,8 @@
 
                 # Check that internal dependencies are as expected.
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False)
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -803,9 +804,10 @@
                 self.make_proto_text_file(self.paths["script_path"],
                     self.script_text)
                 _check_res(dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, []))
+                    [self.proto_dir], {}, [], convert=False))
                 _check_res(dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False))
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False))
 
         def test_int_script(self):
                 """Check that a file that starts with #! and references a file
@@ -817,7 +819,7 @@
                 self.make_proto_text_file(self.paths["script_path"],
                     self.script_text)
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [])
+                    [self.proto_dir], {}, [], convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -832,7 +834,8 @@
 
                 # Check that internal dependencies are as expected.
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False)
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
                 self.assertEqual(len(ds), 2)
                 for d in ds:
                         self.assert_(d.is_error())
@@ -875,9 +878,10 @@
                 t_path = self.make_manifest(self.ext_elf_manf)
                 self.make_elf(self.paths["curses_path"])
                 _check_res(dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, []))
+                    [self.proto_dir], {}, [], convert=False))
                 _check_res(dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False))
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False))
 
         def test_int_elf(self):
                 """Check that an elf file that requires a library inside its
@@ -905,8 +909,8 @@
                 t_path = self.make_manifest(self.int_elf_manf)
                 self.make_elf(self.paths["curses_path"])
                 self.make_elf(self.paths["libc_path"], static=True)
-                d_map, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [])
+                d_map, es, ms, pkg_attrs = dependencies.list_implicit_deps(
+                    t_path, [self.proto_dir], {}, [], convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -915,7 +919,8 @@
 
                 # Check that internal dependencies are as expected.
                 _check_all_res(dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False))
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False))
 
         def test_ext_python_dep(self):
                 """Check that a python file that imports a module outside its
@@ -961,9 +966,10 @@
                 self.make_proto_text_file(self.paths["indexer_path"],
                     self.python_text)
                 _check_all_res(dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, []))
+                    [self.proto_dir], {}, [], convert=False))
                 _check_all_res(dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False))
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False))
 
         def test_ext_python_abs_import_dep(self):
                 """Check that a python file that uses absolute imports a module
@@ -1012,9 +1018,10 @@
                 self.make_proto_text_file(self.paths["indexer_path"],
                     self.python_abs_text)
                 _check_all_res(dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, []))
+                    [self.proto_dir], {}, [], convert=False))
                 _check_all_res(dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False))
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False))
 
         def test_ext_python_pkg_dep(self):
                 """Check that a python file that is the __init__.py file for a
@@ -1060,9 +1067,10 @@
                 self.make_proto_text_file(self.paths["pkg_path"],
                     self.python_text)
                 _check_all_res(dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, []))
+                    [self.proto_dir], {}, [], convert=False))
                 _check_all_res(dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False))
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False))
 
         def test_variants_1(self):
                 """Test that a file which satisfies a dependency only under a
@@ -1074,7 +1082,7 @@
                     self.script_text)
                 self.make_elf(self.paths["ksh_path"])
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [])
+                    [self.proto_dir], {}, [], convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1086,24 +1094,27 @@
                             self.paths["ksh_path"]):
                                 self.assertEqual(d.action.attrs["path"],
                                     self.paths["script_path"])
-                                self.assertEqual(len(d.dep_vars), 1)
-                                self.assert_("variant.arch" in d.dep_vars)
-                                expected_vars = set([("bar",), ("baz",)])
-                                for v in d.dep_vars.not_sat_set:
-                                        if v not in expected_vars:
-                                                raise RuntimeError("Variant %s "
-                                                    "was not in %s" %
-                                                     (v, expected_vars))
-                                        expected_vars.remove(v)
-                                self.assertEqual(expected_vars, set())
+                                expected_not_sat = set([
+                                    frozenset([("variant.arch", "bar")]),
+                                    frozenset([("variant.arch", "baz")])])
+                                expected_sat = set([
+                                    frozenset([("variant.arch", "foo")])])
+                                self.assertEqual(expected_sat,
+                                    d.dep_vars.sat_set)
+                                self.assertEqual(expected_not_sat,
+                                    d.dep_vars.not_sat_set)
                         elif d.dep_key() == self.__path_to_key(
                             self.paths["libc_path"]):
                                 self.assertEqual(
                                     d.action.attrs["path"],
                                     self.paths["ksh_path"])
-                                self.assertEqual(
-                                    set(d.dep_vars["variant.arch"]),
-                                    set(["foo"]))
+                                expected_not_sat = set([
+                                    frozenset([("variant.arch", "foo")])])
+                                expected_sat = set()
+                                self.assertEqual(expected_sat,
+                                    d.dep_vars.sat_set)
+                                self.assertEqual(expected_not_sat,
+                                    d.dep_vars.not_sat_set)
                         else:
                                 raise RuntimeError("Unexpected "
                                     "dependency path:%s" % (d.dep_key(),))
@@ -1114,10 +1125,11 @@
                 dependency, an external dependency is not reported."""
 
                 t_path = self.make_manifest(self.variant_manf_2)
-                self.make_proto_text_file(self.paths["script_path"], self.script_text)
+                self.make_proto_text_file(self.paths["script_path"],
+                    self.script_text)
                 self.make_elf(self.paths["ksh_path"])
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [])
+                    [self.proto_dir], {}, [], convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1125,14 +1137,17 @@
                 self.assert_(len(ds) == 1)
                 d = ds[0]
                 self.assert_(d.is_error())
-                self.assertEqual(set(d.dep_vars.keys()), set(["variant.arch"]))
-                self.assertEqual(set(d.dep_vars["variant.arch"]), set(["foo"]))
+                expected_not_sat = set([frozenset([("variant.arch", "foo")])])
+                expected_sat = set()
+                self.assertEqual(expected_sat, d.dep_vars.sat_set)
+                self.assertEqual(expected_not_sat, d.dep_vars.not_sat_set)
                 self.assertEqual(d.base_names[0], "libc.so.1")
                 self.assertEqual(set(d.run_paths), set(["lib", "usr/lib"]))
 
                 # Check that internal dependencies are as expected.
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False)
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1140,10 +1155,15 @@
                 self.assert_(len(ds) == 2)
                 for d in ds:
                         self.assert_(d.is_error())
-                        self.assertEqual(set(d.dep_vars.keys()),
-                           set(["variant.arch"]))
-                        self.assertEqual(set(d.dep_vars["variant.arch"]),
-                            set(["foo"]))
+                        # Because internal dependencies are not removed, their
+                        # variants are never resolved, so both dependencies
+                        # have all of their variant combinations unsatisfied.
+                        expected_not_sat = set([
+                            frozenset([("variant.arch", "foo")])])
+                        expected_sat = set()
+                        self.assertEqual(expected_sat, d.dep_vars.sat_set)
+                        self.assertEqual(expected_not_sat,
+                            d.dep_vars.not_sat_set)
                         if d.dep_key() == self.__path_to_key(
                             self.paths["ksh_path"]):
                                 self.assertEqual(d.action.attrs["path"],
@@ -1167,7 +1187,7 @@
                     self.script_text)
                 self.make_elf(self.paths["ksh_path"])
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [])
+                    [self.proto_dir], {}, [], convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1179,36 +1199,42 @@
                             self.paths["ksh_path"]):
                                 self.assertEqual(d.action.attrs["path"],
                                     self.paths["script_path"])
-                                self.assertEqual(set(d.dep_vars.keys()),
-                                    set(["variant.arch"]))
-                                self.assertEqual(
-                                    set(d.dep_vars["variant.arch"]),
-                                    set(["bar"]))
+                                expected_not_sat = set([
+                                    frozenset([("variant.arch", "bar")])])
+                                expected_sat = set()
+                                self.assertEqual(expected_sat,
+                                    d.dep_vars.sat_set)
+                                self.assertEqual(expected_not_sat,
+                                    d.dep_vars.not_sat_set)
                         elif d.dep_key() == self.__path_to_key(
                             self.paths["libc_path"]):
                                 self.assertEqual(d.action.attrs["path"],
                                     self.paths["ksh_path"])
-                                self.assertEqual(set(d.dep_vars.keys()),
-                                    set(["variant.arch"]))
-                                self.assertEqual(
-                                    set(d.dep_vars["variant.arch"]),
-                                    set(["foo"]))
+                                expected_not_sat = set([
+                                    frozenset([("variant.arch", "foo")])])
+                                expected_sat = set()
+                                self.assertEqual(expected_sat,
+                                    d.dep_vars.sat_set)
+                                self.assertEqual(expected_not_sat,
+                                    d.dep_vars.not_sat_set)
                         else:
                                 raise RuntimeError("Unexpected "
                                     "dependency path:%s" % (d.dep_key(),))
 
         def test_variants_4(self):
-                """Test that an action with a variant that depends on a delivered action
-                also tagged with that variant, but not with a package-level variant is
-                reported as an internal dependency, not an external one."""
+                """Test that an action with a variant that depends on a
+                delivered action also tagged with that variant, but not with a
+                package-level variant is reported as an internal dependency, not
+                an external one."""
 
                 t_path = self.make_manifest(self.variant_manf_4)
-                self.make_proto_text_file(self.paths["script_path"], self.script_text)
+                self.make_proto_text_file(self.paths["script_path"],
+                    self.script_text)
                 self.make_elf(self.paths["ksh_path"])
 
                 # Check that we only report a single external dependency
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [])
+                    [self.proto_dir], {}, [], convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1217,18 +1243,20 @@
                 d = ds[0]
 
                 self.assert_(d.is_error())
-                self.assertEqual(set(d.dep_vars.keys()), set(["variant.arch",
-                    "variant.opensolaris.zone"]))
-                self.assertEqual(set(d.dep_vars["variant.arch"]), set(["foo"]))
-                self.assertEqual(set(d.dep_vars["variant.opensolaris.zone"]),
-                    set(["global"]))
+                expected_not_sat = set([frozenset([
+                    ("variant.arch", "foo"),
+                    ("variant.opensolaris.zone", "global")])])
+                expected_sat = set()
+                self.assertEqual(expected_sat, d.dep_vars.sat_set)
+                self.assertEqual(expected_not_sat, d.dep_vars.not_sat_set)
 
                 self.assertEqual(d.base_names[0], "libc.so.1")
                 self.assertEqual(set(d.run_paths), set(["lib", "usr/lib"]))
 
                 # Check that internal dependencies are as expected.
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False)
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1237,10 +1265,16 @@
                 self.assert_(len(ds) == 2)
                 for d in ds:
                         self.assert_(d.is_error())
-                        self.assertEqual(set(d.dep_vars.keys()),
-                            set(["variant.opensolaris.zone"]))
-                        self.assertEqual(set(d.dep_vars["variant.opensolaris.zone"]),
-                            set(["global"]))
+                        # Because internal dependencies are not removed, their
+                        # variants are never resolved, so both dependencies
+                        # have all of their variant combinations unsatisfied.
+                        expected_not_sat = set([frozenset([
+                            ("variant.arch", "foo"),
+                            ("variant.opensolaris.zone", "global")])])
+                        expected_sat = set()
+                        self.assertEqual(expected_sat, d.dep_vars.sat_set)
+                        self.assertEqual(expected_not_sat,
+                            d.dep_vars.not_sat_set)
 
                         if d.dep_key() == self.__path_to_key(
                             self.paths["ksh_path"]):
@@ -1252,7 +1286,8 @@
                                     self.paths["ksh_path"])
                         else:
                                 raise RuntimeError(
-                                    "Unexpected dependency path:%s" % (d.dep_key(),))
+                                    "Unexpected dependency path:%s" % \
+                                    (d.dep_key(),))
 
         def test_symlinks(self):
                 """Test that a file is recognized as delivered when a symlink
@@ -1277,7 +1312,7 @@
                 t_path = self.make_manifest(
                     self.int_hardlink_manf_test_symlink)
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [])
+                    [self.proto_dir], {}, [], convert=False)
 
         def test_str_methods(self):
                 """Test the str methods of objects in the flavor space."""
@@ -1323,8 +1358,8 @@
 
                 # This should fail because the "foo" directory is not given
                 # as a proto_dir.
-                d_map, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [])
+                d_map, es, ms, pkg_attrs = dependencies.list_implicit_deps(
+                    t_path, [self.proto_dir], {}, [], convert=False)
                 if len(es) != 1:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1337,9 +1372,9 @@
 
                 # This should work since the "foo" directory has been added to
                 # the list of proto_dirs to use.
-                d_map, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir, os.path.join(self.proto_dir, "foo")],
-                    {}, [])
+                d_map, es, ms, pkg_attrs = dependencies.list_implicit_deps(
+                    t_path, [self.proto_dir,
+                    os.path.join(self.proto_dir, "foo")], {}, [], convert=False)
                 if es:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1349,9 +1384,10 @@
                 # This should be different because the empty text file
                 # is found before the binary file.
                 self.make_proto_text_file(self.paths["curses_path"])
-                d_map, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir, os.path.join(self.proto_dir, "foo")],
-                    {}, [], remove_internal_deps=False)
+                d_map, es, ms, pkg_attrs = dependencies.list_implicit_deps(
+                    t_path, [self.proto_dir,
+                    os.path.join(self.proto_dir, "foo")], {}, [],
+                    remove_internal_deps=False, convert=False)
                 if es:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1363,9 +1399,10 @@
 
                 # This should find the binary file first and thus produce
                 # a depend action.
-                d_map, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [os.path.join(self.proto_dir, "foo"), self.proto_dir],
-                    {}, [], remove_internal_deps=False)
+                d_map, es, ms, pkg_attrs = dependencies.list_implicit_deps(
+                    t_path, [os.path.join(self.proto_dir, "foo"),
+                    self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
                 if es:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1380,7 +1417,7 @@
                 # list of proto_dirs.
                 ds, es, ms, pkg_attrs = \
                     dependencies.list_implicit_deps(t_path, [self.proto_dir],
-                        {}, [])
+                        {}, [], convert=False)
                 if len(es) != 1:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1396,7 +1433,7 @@
                 ds, es, ms, pkg_attrs = \
                     dependencies.list_implicit_deps(t_path,
                         [self.proto_dir, os.path.join(self.proto_dir, "foo")],
-                        {}, [])
+                        {}, [], convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1449,7 +1486,7 @@
                 # This should have an error because it cannot find the file
                 # needed.
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [])
+                    [self.proto_dir], {}, [], convert=False)
                 if len(es) != 1:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1464,7 +1501,7 @@
                 # normally.
                 _py_check_all_res(dependencies.list_implicit_deps(t_path,
                     [self.proto_dir, os.path.join(self.proto_dir, "d5")], {},
-                    []))
+                    [], convert=False))
 
 
         def test_smf_manifest_parse(self):
@@ -1535,7 +1572,8 @@
                 self.make_smf_test_files()
 
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False)
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1557,7 +1595,8 @@
 
                 # verify that removing internal dependencies works as expected
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=True)
+                    [self.proto_dir], {}, [], remove_internal_deps=True,
+                    convert=False)
                 self.assert_(len(ds) == 0, "Expected 0 dependencies, got %s" %
                     len(ds))
 
@@ -1570,7 +1609,8 @@
                 self.make_smf_test_files()
 
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False)
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
                 if es != []:
                         raise RuntimeError("Got errors in results:" +
                             "\n".join([str(s) for s in es]))
@@ -1601,7 +1641,8 @@
                 self.make_smf_test_files()
 
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False)
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
 
                 self.assertEqual(len(ms), 1, "No unknown files reported during "
                     "analysis")
@@ -1650,7 +1691,8 @@
                 self.make_smf_test_files()
 
                 ds, es, ms, pkg_attrs = dependencies.list_implicit_deps(t_path,
-                    [self.proto_dir], {}, [], remove_internal_deps=False)
+                    [self.proto_dir], {}, [], remove_internal_deps=False,
+                    convert=False)
 
                 self.assert_(len(es) == 3,
                     "Detected %s error(s), expected 3" % len(es))
--- a/src/tests/api/t_pkglint.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/tests/api/t_pkglint.py	Tue Sep 28 15:55:54 2010 -0700
@@ -108,7 +108,7 @@
 file 1d5eac1aab628317f9c088d21e4afda9c754bb76 chash=43dbb3e0bc142f399b61d171f926e8f91adcffe2 elfarch=i386 elfbits=32 elfhash=64c67b16be970380cd5840dd9753828e0c5ada8c group=sys mode=2755 owner=root path=usr/sbin/prtdiag pkg.csize=5490 pkg.size=13572 variant.arch=sparc
 """
 
-expected_failures["dup-clashing-vars.mf"] = ["pkglint.dupaction001.2"]
+expected_failures["dup-clashing-vars.mf"] = ["pkglint.dupaction001.1"]
 broken_manifests["dup-clashing-vars.mf"] = \
 """
 #
@@ -249,11 +249,12 @@
 # usr/sbin/fsadmin is ref-counted but has different attributes across duplicates in [<pkg.fmri.PkgFmri 'pkg://opensolaris.org/pkglint/[email protected],5.11-0.141:20100604T143737Z' at 0x8733e2c>]
 # usr/sbin/fsadmin delivered by [<pkg.fmri.PkgFmri 'pkg://opensolaris.org/pkglint/[email protected],5.11-0.141:20100604T143737Z' at 0x8733e2c>] is a duplicate, declaring overlapping variants ['variant.other']
 expected_failures["dup-types-clashing-vars.mf"] = ["pkglint.dupaction008",
-    "pkglint.dupaction007", "pkglint.dupaction001.2"]
+    "pkglint.dupaction007", "pkglint.dupaction001.1"]
 broken_manifests["dup-types-clashing-vars.mf"] = \
 """
 #
-# we try to deliver usr/sbin/fsadmin with different action types, declaring a variant on one.
+# we try to deliver usr/sbin/fsadmin with different action types, declaring a
+# variant on both.
 #
 set name=pkg.fmri value=pkg://opensolaris.org/pkglint/[email protected],5.11-0.141:20100604T143737Z
 set name=org.opensolaris.consolidation value=osnet
@@ -486,14 +487,14 @@
 # 2 for the "linted"-handling code, saying that we're not linting these actions
 #
 expected_failures["linted-action.mf"] = ["pkglint001.2", "pkglint001.2",
-    "pkglint.action002.2", "pkglint.dupaction003.2"]
+    "pkglint.action002.2", "pkglint.dupaction003.1"]
 broken_manifests["linted-action.mf"] = \
 """
 #
-# we deliver some duplicate ref-counted actions (dir, link, hardlink) with differing
-# attributes, but since they're marked as linted, we should get no output, we should
-# still get the duplicate user though, as well as the unusual mode check for the
-# version of the path that's 0751
+# we deliver some duplicate ref-counted actions (dir, link, hardlink) with
+# differing attributes, but since they're marked as linted, we should get no
+# output, we should still get the duplicate user though, as well as the unusual
+# mode check for the version of the path that's 0751
 #
 set name=pkg.fmri value=pkg://opensolaris.org/pkglint/[email protected],5.11-0.141:20100604T143737Z
 set name=org.opensolaris.consolidation value=osnet
@@ -815,6 +816,30 @@
 file nohash elfarch=i386 elfbits=32 elfhash=2d5abc9b99e65c52c1afde443e9c5da7a6fcdb1e group=bin mode=0755 owner=root path=usr/bin/xfs pkg.csize=68397 pkg.size=177700 variant.arch=i386
 """
 
+# There are four expected failures because of the combinations of variant.arch
+# and variant.opensolaris.zone.
+expected_failures["dup-action-variant-subset.mf"] = \
+    ["pkglint.dupaction001.2"] * 4
+broken_manifests["dup-action-variant-subset.mf"] = \
+"""
+set name=pkg.fmri value=pkg://opensolaris.org/developer/build/[email protected],5.11-0.147:20100827T060516Z
+set name=pkg.summary value="Solaris Bundled tools"
+set name=description value="Solaris Bundled tools"
+set name=pkg.description value="Solaris Bundled tools"
+set name=variant.arch value=sparc value=i386
+#
+set name=variant.opensolaris.zone value=global value=nonglobal
+set name=variant.fish value=else value=other
+set name=variant.carrots value=foo
+#
+set name=org.opensolaris.consolidation value=sunpro
+set name=info.classification value="org.opensolaris.category.2008:Development/Source Code Management"
+dir group=sys mode=0755 owner=root path=usr
+dir group=bin mode=0755 owner=root path=usr/bin
+file /usr/bin/make elfarch=i386 variant.carrots=foo elfbits=32 group=bin mode=0755 owner=root path=usr/bin/make pkg.csize=93425 pkg.size=219296 
+file /usr/bin/make elfarch=i386 variant.fish=else elfbits=32 group=bin mode=0755 owner=root path=usr/bin/make pkg.csize=93425 pkg.size=219296
+"""
+
 class TestLogFormatter(log.LogFormatter):
         """Records log messages to a buffer"""
         def __init__(self):
@@ -1028,6 +1053,22 @@
 set name=variant.arch value=i386 value=sparc
 """
 
+        ref_mf["twovar.mf"] = """
+#
+# This package shares the kernel/strmod path with onevar.mf but has a different
+# set of variants for both the action and the package.  This should not cause
+# an assertion to be raised.
+#
+set name=variant.arch value=sparc value=i386
+set name=pkg.summary value="A package with two variant values"
+set name=pkg.description value="A package with two values for variant.arch."
+set name=info.classification value=org.opensolaris.category.2008:System/Core
+set name=org.opensolaris.consolidation value=osnet
+set name=variant.opensolaris.zone value=global value=nonglobal
+set name=pkg.fmri value=pkg://opensolaris.org/variant/[email protected],5.11-0.148:20100910T211706Z
+dir group=sys mode=0755 owner=root path=kernel/strmod variant.opensolaris.zone=global
+"""
+
         # A set of manifests to be linted. Note that these are all self
         # consistent, passing all lint checks on their own.
         # Errors are designed to show up when linted against the ref_*
@@ -1153,6 +1194,24 @@
 depend fmri=system/[email protected] type=require
         """
 
+        expected_failures["onevar.mf"] = []
+        lint_mf["onevar.mf"] = """
+#
+# Test that a package which is only published against one variant value doesn't
+# cause an assertion failure when it shares an action with another package.
+# In this case, ketnel/strmod is shared between this package and the reference
+# package twovar.
+#
+set name=pkg.summary value="A package with one variant" variant.arch=i386
+set name=org.opensolaris.consolidation value=osnet variant.arch=i386
+set name=info.classification value="org.opensolaris.category.2008:Drivers/Other Peripherals" variant.arch=i386
+set name=variant.arch value=i386
+set name=variant.opensolaris.zone value=global value=nonglobal variant.arch=i386
+set name=pkg.fmri value=pkg://opensolaris.org/variants/[email protected],5.11-0.148:20100910T195826Z
+set name=pkg.description value="A package published against only one variant value" variant.arch=i386
+dir group=sys mode=0755 owner=root path=kernel/strmod variant.arch=i386 variant.opensolaris.zone=global
+"""
+
         def setUp(self):
 
                 pkg5unittest.ManyDepotTestCase.setUp(self,
--- a/src/tests/api/t_variant.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/tests/api/t_variant.py	Tue Sep 28 15:55:54 2010 -0700
@@ -20,8 +20,11 @@
 # CDDL HEADER END
 #
 
+#
 # Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
+#
 
+import copy
 import testutils
 if __name__ == "__main__":
         testutils.setup_environment("../../../proto")
@@ -37,64 +40,121 @@
                 for k in v1:
                         self.assertEqual(sorted(v1[k]), sorted(v2[k]))
 
-        def test_1(self):
-                """Test basic functionality of variants."""
-                v1 = variant.VariantSets(dict([(1, ["a"]), (3, ["b"])]))
-                v2 = variant.VariantSets(dict([(1, ["a"]), (4, ["b"])]))
-                v3 = variant.VariantSets(dict([(1, ["a"]), (3, ["c"])]))
-                v4 = variant.VariantSets(dict([(1, ["b"]), (4, ["v"])]))
-                v5 = variant.VariantSets(dict([(1, ["a"]), (3, ["b"])]))
-                v1_v2_merge = variant.VariantSets(dict([(1, ["a"]), (3, ["b"]),
-                    (4, ["b"])]))
-                v1_v3_merge = variant.VariantSets(dict([(1, ["a"]),
-                    (3, ["b", "c"])]))
-                v4_v1_merge_unknown = variant.VariantSets(dict([(1, ["b"]),
-                    (3, ["b"]), (4, ["v"])]))
+
+        def test_vct(self):
+                """Test functionality of VariantCombinationTemplates."""
+
+                vct_1 = variant.VariantCombinationTemplate(
+                    dict([(1, ["a", "b", "c"]), (2, ["z", "y", "x"])]))
+                self.assertEqual(vct_1[1], set(["a", "b", "c"]))
+                self.assert_(vct_1.issubset(vct_1))
+                self.assertEqual(str(vct_1), ' 1="a","b","c" 2="x","y","z"')
+
+                vct_2 = variant.VariantCombinationTemplate(
+                    dict([(1, ["a", "b"]), (2, ["z", "y"])]))
+                self.assert_(vct_2.issubset(vct_1))
+                self.assert_(not vct_1.issubset(vct_2))
+                vct_2.merge_unknown(vct_1)
+                self.assertEqual(vct_2[1], set(["a", "b"]))
+
+                vct_3 = variant.VariantCombinationTemplate(
+                    dict([(1, ["a", "b", "c"])]))
+                self.assert_(vct_3.issubset(vct_1))
+                self.assert_(not vct_1.issubset(vct_3))
+                vct_3.merge_unknown(vct_1)
+                self.assertEqual(vct_1, vct_3)
+
+        def test_variant_combinations(self):
+                """Test functionality of VariantCombinations."""
 
-                self.assertEqual(v1.issubset(v2), False)
-                self.assertEqual(v1.issubset(v1_v2_merge), True)
-                self.assertEqual(v1.issubset(v1_v3_merge), True)
-                self.assertEqual(v1.difference(v3), dict([(3, set(["b"]))]))
-                # Test for bug 11507, computing a difference when the sets
-                # are not totally overlapping.
-                self.assertEqual(v1.difference(v4),
-                    dict([(1, set(["a"])), (3, set(["b"]))]))
-                self.assertEqual(v1.difference(v1_v3_merge), {})
+                vct_1 = variant.VariantCombinationTemplate(
+                    dict([(1, ["a", "b", "c"]), (2, ["z", "y", "x"])]))
+                vct_2 = variant.VariantCombinationTemplate(
+                    dict([(10, ["l", "m", "n"]), (20, ["p", "q", "r"])]))
+                vct_3 = variant.VariantCombinationTemplate(
+                    dict([(1, ["a"]), (2, ["z"])]))
+                vct_4 = variant.VariantCombinationTemplate(
+                    dict([(1, ["a", "b", "d"]), (2, ["z", "y", "x"])]))
+                set_combo = set([
+                    frozenset([(1, "a"), (2, "z")]),
+                    frozenset([(1, "a"), (2, "y")]),
+                    frozenset([(1, "a"), (2, "x")]),
+                    frozenset([(1, "b"), (2, "z")]),
+                    frozenset([(1, "b"), (2, "y")]),
+                    frozenset([(1, "b"), (2, "x")]),
+                    frozenset([(1, "c"), (2, "z")]),
+                    frozenset([(1, "c"), (2, "y")]),
+                    frozenset([(1, "c"), (2, "x")])])
+                vc1_s = variant.VariantCombinations(vct_1, True)
+                self.assertEqual(vc1_s.sat_set, set_combo)
+                self.assertEqual(vc1_s.not_sat_set, set())
+                self.assert_(not vc1_s.is_empty())
+
+                vc1_ns = variant.VariantCombinations(vct_1, False)
+                self.assertEqual(vc1_ns.not_sat_set, set_combo)
+                self.assertEqual(vc1_ns.sat_set, set())
+                self.assert_(not vc1_ns.is_empty())
 
-                self.assertEqual(v1.intersects(v2), False)
-                self.assertEqual(v1.intersects(v1_v2_merge), True)
-                self.assertEqual(v1_v2_merge.intersects(v1), False)
-                self.assertEqual(v1.intersects(v1_v3_merge), True)
-                self.assertEqual(v1_v3_merge.intersects(v1), True)
+                self.assertRaises(AssertionError, vc1_ns.simplify, vct_2)
+                self.assertEqual(vc1_ns.not_sat_set, set_combo)
+                self.assertEqual(vc1_ns.sat_set, set())
+                self.assert_(not vc1_ns.is_empty())
+
+                self.assertRaises(AssertionError, vc1_ns.simplify, vct_4)
+                self.assertEqual(vc1_ns.not_sat_set, set_combo)
+                self.assertEqual(vc1_ns.sat_set, set())
+                self.assert_(not vc1_ns.is_empty())
 
-                v4.merge_unknown(v1)
-                self.__check_equal(v4, v4_v1_merge_unknown)
+                vc1_tmp = copy.copy(vc1_ns)
+                self.assert_(not vc1_tmp.is_satisfied())
+                vc1_tmp.mark_all_as_satisfied()
+                self.assert_(vc1_tmp.is_satisfied())
+                self.assertEqual(vc1_tmp.sat_set, set_combo)
 
-                v2.merge(v1)
-                self.__check_equal(v2, v1_v2_merge)
-                v1.merge(v3)
-                self.__check_equal(v1, v1_v3_merge)
-
-                v1.remove_identical(v5)
-                self.__check_equal(v1, dict([(3, ["b", "c"])]))
+                vct3_set_combo = set([frozenset([(1, "a"), (2, "z")])])
+                vc3_ns = variant.VariantCombinations(vct_3, False)
+                self.assertEqual(vc3_ns.not_sat_set, vct3_set_combo)
+                self.assert_(vc3_ns.issubset(vc1_ns, False))
+                self.assert_(not vc1_ns.issubset(vc3_ns, False))
+                self.assert_(vc1_ns.issubset(vc3_ns, True))
+                self.assert_(not vc1_s.issubset(vc3_ns, True))
 
-        def test_get_sat_unset(self):
-                """Verify that get_satisfied() and get_unsatisfied() behave as
-                expected.
-                """
+                vc3_s = variant.VariantCombinations(vct_3, True)
+                vc2_s = variant.VariantCombinations(vct_2, True)
+                self.assert_(vc3_s.intersects(vc1_s))
+                self.assert_(vc3_ns.intersects(vc1_s))
+                self.assert_(not vc3_ns.intersects(vc2_s))
+                self.assert_(not vc3_s.intersects(vc2_s))
+                self.assert_(vc1_s.intersects(vc3_s))
+                intersect = vc3_s.intersection(vc1_s)
+                self.assertEqual(intersect.sat_set, vct3_set_combo)
+                self.assertEqual(intersect.not_sat_set, set())
 
-                v1 = variant.VariantSets(dict([(1, set(["a", "b"])),
-                            (2, set(["c", "d"]))]))
-                self.__check_equal(v1, v1.get_unsatisfied())
-                self.__check_equal(v1.get_satisfied(), dict())
+                # Test that modifying the original does not modify the copy.
+                vc3_ns_copy = copy.copy(vc3_ns)
+                vc3_ns.mark_all_as_satisfied()
+                self.assertEqual(vc3_ns_copy.not_sat_set, vct3_set_combo)
+                self.assertEqual(vc3_ns.not_sat_set, set())
+                self.assertEqual(vc3_ns.sat_set, vct3_set_combo)
+
+                vct_empty = variant.VariantCombinationTemplate(dict([]))
+                vc_empty = variant.VariantCombinations(vct_empty, True)
+                self.assert_(vc_empty.is_empty())
+                self.assert_(vc_empty.intersects(vc1_ns))
+                self.assert_(vc1_ns.intersects(vc_empty))
 
-                v2 = variant.VariantSets(dict([(1, ["b"]), (2, ["d", "c"])]))
-                v1.mark_as_satisfied(v2)
-                self.__check_equal(v1.get_satisfied(), v2)
+                vc1_ns.mark_as_satisfied(vc3_s)
+                self.assertEqual(vc1_ns.sat_set, vct3_set_combo)
+                self.assertEqual(vc1_ns.not_sat_set, set_combo - vct3_set_combo)
 
-                # neither 2:C nor 2:D satisfied with 1:A
-                self.__check_equal(v1.get_unsatisfied(),
-                    variant.VariantSets(dict([(1, ["a"]), (2, ["c", "d"])])))
+                vc1_s.simplify(vct_1)
+                self.assertEqual(vc1_s.sat_set, set())
+                self.assertEqual(vc1_s.not_sat_set, set())
+
+                vc1_ns_simp = variant.VariantCombinations(vct_1, False)
+                vc1_ns_simp.simplify(vct_1)
+                self.assertEqual(vc1_ns_simp.sat_set, set())
+                self.assertEqual(vc1_ns_simp.not_sat_set, set())
 
 
 if __name__ == "__main__":
--- a/src/tests/cli/t_pkgdep.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/tests/cli/t_pkgdep.py	Tue Sep 28 15:55:54 2010 -0700
@@ -49,10 +49,16 @@
 sr/xpg4/lib/libcurses.so.1
 """
         test_manf_2 = """\
+set name=variant.arch value=foo value=bar
 file NOHASH group=bin mode=0755 owner=root path=usr/xpg4/lib/libcurses.so.1 variant.arch=foo
 file NOHASH group=bin mode=0755 owner=root path=etc/pam.conf
 """
 
+        test_elf_warning_manf = """\
+file NOHASH group=bin mode=0755 owner=root path=usr/xpg4/lib/libcurses.so.1
+file NOHASH group=bin mode=0755 owner=root path=etc/libc.so.1
+"""
+
         int_hardlink_manf = """\
 hardlink path=usr/foo target=../var/log/syslog
 file NOHASH group=sys mode=0644 owner=root path=var/log/syslog
@@ -320,8 +326,10 @@
 depend fmri=pkg:/s-v-bar pkg.debug.depend.file=var/log/authlog pkg.debug.depend.file=var/log/file2 pkg.debug.depend.reason=baz pkg.debug.depend.type=hardlink type=require
 depend fmri=pkg:/s-v-baz-one pkg.debug.depend.file=var/log/authlog pkg.debug.depend.reason=baz pkg.debug.depend.type=hardlink type=require variant.foo=baz variant.num=one
 depend fmri=pkg:/s-v-baz-two pkg.debug.depend.file=var/log/authlog pkg.debug.depend.reason=baz pkg.debug.depend.type=hardlink type=require variant.foo=baz variant.num=two
-depend fmri=pkg:/sep_vars pkg.debug.depend.file=var/log/f3 pkg.debug.depend.reason=b3 pkg.debug.depend.type=hardlink type=require variant.foo=bar
-depend fmri=pkg:/sep_vars pkg.debug.depend.file=var/log/f4 pkg.debug.depend.reason=b3 pkg.debug.depend.type=hardlink type=require variant.foo=baz
+depend fmri=pkg:/sep_vars pkg.debug.depend.file=var/log/f3 pkg.debug.depend.reason=b3 pkg.debug.depend.type=hardlink type=require variant.foo=bar variant.num=one
+depend fmri=pkg:/sep_vars pkg.debug.depend.file=var/log/f3 pkg.debug.depend.reason=b3 pkg.debug.depend.type=hardlink type=require variant.foo=bar variant.num=two
+depend fmri=pkg:/sep_vars pkg.debug.depend.file=var/log/f4 pkg.debug.depend.reason=b3 pkg.debug.depend.type=hardlink type=require variant.foo=baz variant.num=one
+depend fmri=pkg:/sep_vars pkg.debug.depend.file=var/log/f4 pkg.debug.depend.reason=b3 pkg.debug.depend.type=hardlink type=require variant.foo=baz variant.num=two
 depend fmri=pkg:/subset-prov pkg.debug.depend.file=var/log/f6 pkg.debug.depend.file=var/log/f5 pkg.debug.depend.reason=b5 pkg.debug.depend.type=hardlink type=require
 """
 
@@ -511,9 +519,14 @@
 depend fmri=__TBD pkg.debug.depend.file=unsatisfied pkg.debug.depend.path=usr/bin pkg.debug.depend.reason=foo/bar pkg.debug.depend.type=elf type=require
 """
 
-        unsatisfied_error = """\
+        unsatisfied_error_1 = """\
 %s has unresolved dependency 'depend fmri=__TBD pkg.debug.depend.file=unsatisfied pkg.debug.depend.path=usr/bin pkg.debug.depend.reason=foo/bar pkg.debug.depend.type=elf type=require' under the following combinations of variants:
-variant.foo:bar"""
+"""
+
+        unsatisfied_error_2 = """\
+%s has unresolved dependency 'depend fmri=__TBD pkg.debug.depend.file=unsatisfied pkg.debug.depend.path=usr/bin pkg.debug.depend.reason=foo/bar pkg.debug.depend.type=elf type=require' under the following combinations of variants:
+variant.foo:bar
+"""
 
         partially_satisfied_manf = """\
 set name=fmri value=pkg:/partially_satisfied_manf
@@ -630,6 +643,29 @@
 The file to be installed in usr/bin/pkg does not specify a specific version of python either in its installed path nor in its text.  Such a file cannot be analyzed for dependencies since the version of python it will be used with is unknown.  The text of the file is here: %s/usr/bin/pkg.
 """
 
+        bug_16808_manf = """\
+file NOHASH group=bin mode=0755 owner=root path=var/log/syslog variant.opensolaris.zone=global
+hardlink path=var/log/foobar target=syslog
+"""
+
+        bug_15958_manf = """\
+set name=variant.opensolaris.zone value=global value=nonglobal
+""" + bug_16808_manf
+
+        res_bug_15958 = """\
+depend fmri=__TBD pkg.debug.depend.file=syslog pkg.debug.depend.path=var/log pkg.debug.depend.reason=var/log/foobar pkg.debug.depend.type=hardlink type=require variant.opensolaris.zone=nonglobal
+"""
+
+        bug_16808_error = """\
+The action delivering var/log/syslog is tagged with a variant type or value not tagged on the package. Dependencies on this file may fail to be reported.
+The action's variants are: variant.opensolaris.zone="global"
+The package's variants are: <none>
+"""
+
+        res_elf_warning = """\
+depend fmri=__TBD pkg.debug.depend.file=libc.so.1 pkg.debug.depend.path=lib pkg.debug.depend.path=usr/lib pkg.debug.depend.reason=usr/xpg4/lib/libcurses.so.1 pkg.debug.depend.severity=warning pkg.debug.depend.type=elf type=require
+"""
+
         def setUp(self):
                 pkg5unittest.SingleDepotTestCase.setUp(self)
                 #
@@ -744,8 +780,9 @@
                 tp = self.make_manifest(self.test_manf_2)
                 self.make_proto_text_file("etc/pam.conf", "text")
 
-                self.pkgdepend_generate("-d %s %s" % (self.test_proto_dir, tp))
-                self.check_res(self.res_manf_2, self.output)
+                self.pkgdepend_generate("-m -d %s %s" %
+                    (self.test_proto_dir, tp))
+                self.check_res(self.res_manf_2 + self.test_manf_2, self.output)
                 self.check_res("", self.errout)
 
                 res_path = self.make_manifest(self.output)
@@ -1222,19 +1259,19 @@
 
                 # Generally unsatisfied dependency
                 self.pkgdepend_resolve(" -o %s" % unsat, exit=1)
-                self.check_res(self.unsatisfied_error % unsat, self.errout)
+                self.check_res(self.unsatisfied_error_1 % unsat, self.errout)
 
                 # Dependency that would be satisfied were it not for
                 # mismatched variants
                 self.pkgdepend_resolve(" -o %s %s" % (unsat, satisfying),
                     exit=1)
-                self.check_res(self.unsatisfied_error % unsat, self.errout)
+                self.check_res(self.unsatisfied_error_1 % unsat, self.errout)
 
                 # Partially satisfied dependency (for one variant
                 # value, not another)
                 self.pkgdepend_resolve(" -o %s %s" % (partial, satisfying),
                     exit=1)
-                self.check_res(self.unsatisfied_error % partial, self.errout)
+                self.check_res(self.unsatisfied_error_2 % partial, self.errout)
                 self.check_res("%s\n\n%s\n%s" % (partial, satisfying,
                     self.satisfying_out), self.output)
 
@@ -1416,6 +1453,37 @@
                 self.check_res("", self.output)
                 self.check_res("", self.errout)
 
+        def test_bug_15958(self):
+                """Test that a dependency which is not satisfied internally
+                under certain variants is reported correctly."""
+
+                tp = self.make_manifest(self.bug_15958_manf)
+                self.make_proto_text_file("var/log/syslog", "text")
+                self.pkgdepend_generate("-d %s %s" % (self.test_proto_dir, tp))
+                self.check_res("", self.errout)
+                self.check_res(self.res_bug_15958, self.output)
+
+        def test_bug_16808(self):
+                """Test that if an action uses a variant not declared at the
+                package level, an error is reported."""
+
+                tp = self.make_manifest(self.bug_16808_manf)
+                self.make_proto_text_file("var/log/syslog", "text")
+                self.pkgdepend_generate("-d %s %s" % (self.test_proto_dir, tp),
+                    exit=1)
+                self.check_res(self.bug_16808_error, self.errout)
+
+        def test_elf_warning(self):
+                """Test that the generated elf dependency is tagged with
+                pkg.debug.depend.severity=warning and causes no error."""
+
+                tp = self.make_manifest(self.test_elf_warning_manf)
+                self.make_proto_text_file("etc/libc.so.1", "text")
+                self.make_elf([], "usr/xpg4/lib/libcurses.so.1")
+                self.pkgdepend_generate("-d %s %s" % (self.test_proto_dir, tp))
+                self.check_res("", self.errout)
+                self.check_res(self.res_elf_warning, self.output)
+
 
 if __name__ == "__main__":
         unittest.main()
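
The bug 16808 check exercised above amounts to verifying that every variant
an action is tagged with is also declared, with the same values, at the
package level.  A minimal sketch of that subset test (hypothetical helper,
not the pkgdepend API):

def undeclared_variants(action_variants, pkg_variants):
        # Both arguments map a variant name to the set of values it
        # declares; anything the action uses that the package does not
        # declare is returned.
        missing = {}
        for name, values in action_variants.items():
                undeclared = set(values) - set(pkg_variants.get(name, ()))
                if undeclared:
                        missing[name] = undeclared
        return missing

# Mirrors bug_16808_manf: the action is tagged
# variant.opensolaris.zone=global while the package declares no variants,
# so the tag is flagged; bug_15958_manf declares the variant and passes.
act = {"variant.opensolaris.zone": set(["global"])}
assert undeclared_variants(act, {}) == \
    {"variant.opensolaris.zone": set(["global"])}
assert undeclared_variants(act,
    {"variant.opensolaris.zone": set(["global", "nonglobal"])}) == {}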
--- a/src/tests/cli/t_pkgdep_resolve.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/tests/cli/t_pkgdep_resolve.py	Tue Sep 28 15:55:54 2010 -0700
@@ -34,9 +34,9 @@
 import pkg.client.api as api
 import pkg.client.progress as progress
 import pkg.flavor.base as base
+from pkg.fmri import PkgFmri
 import pkg.portable as portable
 import pkg.publish.dependencies as dependencies
-from pkg.fmri import PkgFmri
 
 
 class TestApiDependencies(pkg5unittest.SingleDepotTestCase):
@@ -325,28 +325,6 @@
                         else:
                                 raise RuntimeError("Unexpected error:%s" % e)
 
-        def test_bug_15647(self):
-                """Verify that in cases where both the provided manifests and
-                installed image provide a resolution for a given dependency
-                that the dependency is only resolved once for a given variant
-                and that the resolution provided by the manifests is used."""
-
-                self.pkgsend_bulk(self.rurl, self.installed_double_provides)
-                self.api_obj.refresh(immediate=True)
-                self._api_install(self.api_obj, ["double_provides"])
-
-                manifests = [self.make_manifest(x) for x in
-                    (self.newer_double_provides, self.double_deps)]
-
-                pkg_deps, errs = dependencies.resolve_deps(manifests,
-                    self.api_obj)
-
-                self.assertEqual(len(pkg_deps[manifests[1]]), 1)
-                for d in pkg_deps[manifests[1]]:
-                        fmri = PkgFmri(d.attrs["fmri"], build_release="5.11")
-                        if str(fmri).startswith("pkg:/double_provides"):
-                                self.assertEqual(str(fmri.version.branch), "1")
-
         def test_simple_variants_1(self):
                 """Test that variants declared on the actions work correctly
                 when resolving dependencies."""
@@ -362,11 +340,11 @@
                         if d.attrs["fmri"] == "pkg:/s-v-bar":
                                 self.assertEqual(
                                     d.attrs["variant.foo"],
-                                    set(["bar"]))
+                                    "bar")
                         elif d.attrs["fmri"] == "pkg:/s-v-baz":
                                 self.assertEqual(
                                     d.attrs["variant.foo"],
-                                    set(["baz"]))
+                                    "baz")
                         else:
                                 raise RuntimeError("Unexpected fmri %s "
                                     "for dependency %s" %
@@ -399,11 +377,11 @@
                         if d.attrs["fmri"] == "pkg:/s-v-bar":
                                 self.assertEqual(
                                     d.attrs["variant.foo"],
-                                    set(["bar"]))
+                                    "bar")
                         elif d.attrs["fmri"] == "pkg:/s-v-baz":
                                 self.assertEqual(
                                     d.attrs["variant.foo"],
-                                    set(["baz"]))
+                                    "baz")
                         else:
                                 raise RuntimeError("Unexpected fmri %s "
                                     "for dependency %s" %
@@ -423,36 +401,39 @@
                 pkg_deps, errs = dependencies.resolve_deps(
                     [m1_path, m2_path, m3_path, m4_path], self.api_obj)
                 self.assertEqual(len(pkg_deps), 4)
-                self.assertEqual(len(pkg_deps[m1_path]), 3)
+                # This is 5 because each variant.num value gets its own
+                # s-v-bar dependency instead of being collapsed into one.
+                self.assertEqual(len(pkg_deps[m1_path]), 5)
                 self.assertEqual(len(pkg_deps[m2_path]), 0)
                 self.assertEqual(len(pkg_deps[m3_path]), 0)
                 self.assertEqual(len(pkg_deps[m4_path]), 0)
                 self.assertEqual(len(errs), 1)
+                vnums = set(["one", "two", "three"])
                 for d in pkg_deps[m1_path]:
                         if d.attrs["fmri"] == "pkg:/s-v-bar":
                                 self.assertEqual(
                                     d.attrs["variant.foo"],
-                                    set(["bar"]))
-                                self.assertEqual(
-                                    "variant.num" in d.attrs, False)
+                                    "bar")
+                                vnums.remove(d.attrs["variant.num"])
                         elif d.attrs["fmri"] == "pkg:/s-v-baz-one":
                                 self.assertEqual(
                                     d.attrs["variant.foo"],
-                                    set(["baz"]))
+                                    "baz")
                                 self.assertEqual(
                                     d.attrs["variant.num"],
-                                    set(["one"]))
+                                    "one")
                         elif d.attrs["fmri"] == "pkg:/s-v-baz-two":
                                 self.assertEqual(
                                     d.attrs["variant.foo"],
-                                    set(["baz"]))
+                                    "baz")
                                 self.assertEqual(
                                     d.attrs["variant.num"],
-                                    set(["two"]))
+                                    "two")
                         else:
                                 raise RuntimeError("Unexpected fmri %s "
                                     "for dependency %s" %
                                     (d.attrs["fmri"], d))
+                self.assertEqual(vnums, set())
 
         def test_multi_file_dependencies(self):
                 def __check_results(pkg_deps, errs, exp_pkg, no_deps, one_dep):
@@ -770,5 +751,27 @@
                         else:
                                 raise RuntimeError("Unexpected error:%s" % e)
 
+        def test_bug_15647(self):
+                """Verify that in cases where both the provided manifests and
+                installed image provide a resolution for a given dependency
+                that the dependency is only resolved once for a given variant
+                and that the resolution provided by the manifests is used."""
+
+                self.pkgsend_bulk(self.rurl, self.installed_double_provides)
+                self.api_obj.refresh(immediate=True)
+                self._api_install(self.api_obj, ["double_provides"])
+
+                manifests = [self.make_manifest(x) for x in
+                    (self.newer_double_provides, self.double_deps)]
+
+                pkg_deps, errs = dependencies.resolve_deps(manifests,
+                    self.api_obj)
+
+                self.assertEqual(len(pkg_deps[manifests[1]]), 1)
+                for d in pkg_deps[manifests[1]]:
+                        fmri = PkgFmri(d.attrs["fmri"], build_release="5.11")
+                        if str(fmri).startswith("pkg:/double_provides"):
+                                self.assertEqual(str(fmri.version.branch), "1")
+
 if __name__ == "__main__":
         unittest.main()
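
A minimal sketch of the expectation behind the updated assertions above,
using plain dicts with hypothetical contents rather than real dependency
actions: each resolved dependency now carries a single string value per
variant (e.g. "bar" rather than set(["bar"])), and the s-v-bar dependency is
emitted once per variant.num value, which is why five dependencies are
expected for m1_path:

# Three s-v-bar dependencies, one per variant.num value, plus one each for
# s-v-baz-one and s-v-baz-two.
expected = [
    {"fmri": "pkg:/s-v-bar", "variant.foo": "bar", "variant.num": n}
    for n in ("one", "two", "three")
] + [
    {"fmri": "pkg:/s-v-baz-one", "variant.foo": "baz", "variant.num": "one"},
    {"fmri": "pkg:/s-v-baz-two", "variant.foo": "baz", "variant.num": "two"},
]
assert len(expected) == 5
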
--- a/src/util/publish/pkgdiff.py	Wed Sep 29 11:55:47 2010 +1300
+++ b/src/util/publish/pkgdiff.py	Tue Sep 28 15:55:54 2010 -0700
@@ -164,7 +164,7 @@
                         if res:
                                 return res
                         # sort by variant
-                        res = cmp(sorted(list(a.get_variants())), sorted(list(b.get_variants())))
+                        res = cmp(sorted(list(a.get_variant_template())), sorted(list(b.get_variant_template())))
                         if res:
                                 return res
                 else: