2152 standalone package support needed (on-disk format)
authorShawn Walker <shawn.walker@oracle.com>
Wed, 09 Feb 2011 18:43:21 -0800
changeset 2219 60ad60f7592c
parent 2218 f025ba1faae7
child 2220 99ce0a6bae85
2152 standalone package support needed (on-disk format)
6576 pkg install/image-update support for temporary publisher origins desired
15450 pydoc for pkg.client.api needs to document reset usage requirement
16742 pkg manpage description of info -r not quite right
17761 repository append operation erroneously includes append file when closing transaction
17762 signing certs added to a repository do not have correct file permissions set
17786 .hgignore needs update for coverage
17787 api should allow publishers without origins
17788 pkg should allow relative paths and not require URI syntax for repositories
17789 pkg(1) arrangement and wording could be improved
.hgignore
doc/client_api_versions.txt
doc/on-disk-format.txt
src/brand/attach
src/brand/common.ksh
src/brand/pkgcreatezone
src/client.py
src/gui/modules/misc_non_gui.py
src/man/pkg.1.txt
src/man/pkgrecv.1.txt
src/man/pkgsend.1.txt
src/modules/actions/license.py
src/modules/client/api.py
src/modules/client/api_errors.py
src/modules/client/image.py
src/modules/client/imageconfig.py
src/modules/client/imageplan.py
src/modules/client/progress.py
src/modules/client/publisher.py
src/modules/client/transport/repo.py
src/modules/client/transport/transport.py
src/modules/lint/engine.py
src/modules/misc.py
src/modules/p5p.py
src/modules/pkgtarfile.py
src/modules/publish/transaction.py
src/modules/server/repository.py
src/modules/server/transaction.py
src/pkg/manifests/package%2Fpkg.p5m
src/pkgdep.py
src/pull.py
src/tests/api/t_api.py
src/tests/api/t_api_list.py
src/tests/api/t_p5p.py
src/tests/cli/t_pkg_publisher.py
src/tests/cli/t_pkg_temp_sources.py
src/tests/cli/t_pkgrecv.py
src/tests/pkg5unittest.py
src/util/distro-import/importer.py
--- a/.hgignore	Wed Feb 09 15:25:24 2011 -0800
+++ b/.hgignore	Wed Feb 09 18:43:21 2011 -0800
@@ -32,7 +32,8 @@
 ^src/man/.*\.(1m?|5)$
 ^src/pkg/Makefile.link
 ^src/pkg/pkgtmp/
-^src/tests/.figleaf$
+^src/tests/.coverage$
+^src/tests/htmlcov$
 ^src/tests/ro_data/signing_certs/produced/.*/.*\.csr
 ^src/tests/ro_data/signing_certs/produced/.*\.old
 ^src/tests/ro_data/signing_certs/produced/.*\.tmp
--- a/doc/client_api_versions.txt	Wed Feb 09 15:25:24 2011 -0800
+++ b/doc/client_api_versions.txt	Wed Feb 09 18:43:21 2011 -0800
@@ -1,5 +1,54 @@
+Version 53:
+Incompatible with clients using versions 0-52:
+
+    General changes:
+        * Support for pkg(5) archives has been added; a file URI
+          containing the location of an archive can be used anywhere
+          that a repository location is accepted (except for search).
+
+        * Publishers are no longer required to have origins or mirrors
+          so that sticky state and search order can be retained when
+          installing packages from temporary sources.
+
+        * The client API now supports the use of temporary package
+          sources during operations.
+
+    pkg.client.api.ImageInterface has changed as follows:
+        * get_pkg_list() is now a locking operation and cannot be used
+          at the same time as other functions (such as in a
+          multi-threaded scenario).
+
+        * get_pkg_categories() is now a locking operation and cannot
+          be used at the same time as other functions (such as in a
+          multi-threaded scenario).
+
+        * add_publisher() now permits publishers with no repository
+          origins or mirrors to be added.
+
+        * update_publisher() now permits publishers to be updated even
+          if the new object has no origins or mirrors.
+
+        * info(), get_pkg_categories(), get_pkg_list(), get_manifest(),
+          plan_install(), plan_update(), plan_update_all(), and
+          plan_change_varcets() now have a 'repos' parameter for
+          specifying temporary package repositories for use during
+          the operation.
+
+    pkg.client.progress has changed as follows:
+        * There are new methods for tracking the progress of archive
+          creation: archive_set_goal(), archive_add_progress(),
+          and archive_done().  See 'pydoc pkg.client.progress' for
+          details.
+
+    pkg.client.api_errors has changed as follows:
+        * The PublisherOriginRequired exception class has been removed.
+
+        * The new exception NoPublisherRepositories was added.  It is
+          raised whenever an attempt is made to perform a transport
+          operation for a publisher with no transport configuration.
+
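+    Illustration (not part of the change list above):
+
+        A minimal sketch of one way a caller might use the new
+        'repos' parameter; it assumes 'api_inst' is an existing
+        pkg.client.api.ImageInterface instance and uses a
+        hypothetical archive path:
+
+            import os
+            import pkg.misc as misc
+
+            # Relative paths, file URIs, and pkg(5) archive paths are
+            # all accepted; parse_uri canonicalises them.
+            origins = set([misc.parse_uri("/path/to/foo.p5p",
+                cwd=os.getcwd())])
+
+            # Plan, but do not execute, an install that may also draw
+            # packages from the temporary source (refresh_catalogs and
+            # noexecute are passed positionally, as pkg(1) does).
+            stuff_to_do = api_inst.plan_install(["foo"], True, True,
+                repos=origins)
+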
 Version 52:
-Incompatible with clients using versions 0-50:
+Incompatible with clients using versions 0-51:
     pkg.client.publisher.Publisher has changed as follows:
         * The following functions have been made private:
             add_cert, get_certs_by_name, get_crl, check_extensions
@@ -19,9 +68,12 @@
           pkg.client.api' for details.
 
 Version 50:
-Compatible with clients using versions 46-50:
-    pkg.client.api.ImageInterface has changed as follows:
-        * plan_revert was added
+Compatible with clients using versions 46-49:
+
+    pkg.client.api.ImageInterface changed as follows:
+        * The new function plan_revert() was added to allow planning
+          an operation for reverting specific or tagged packaged
+          files.
 
 Version 49:
 Compatible with clients using versions 46-48:
@@ -423,7 +475,7 @@
         """Used to indicate that a key could not be found."""
 
 Version 15:
-Incompatible with clients using versions 1-14.
+Incompatible with clients using versions 0-14.
 Changes:
     The unfound_fmris variable in the
     pkg.client.api_errors.PlanCreationException class is now called 
@@ -492,13 +544,13 @@
                 the currently selected repository needs to be refreshed.
 
 Version 12:
-Incompatible with clients using versions 1-12
+Incompatible with clients using versions 0-12
 Changes:
     This version adds local_search and remote_search to the api and removes
     those functions from pkg.client.image.
 
 Version 11:
-Incompatible with clients using versions 1-10
+Incompatible with clients using versions 0-10
 Changes:
    This version changes all parameter names and property names from 'authority'
    to 'publisher'.  For example, parameters named 'auths' were changed to
@@ -613,21 +665,21 @@
                 (excluding those disabled) will be output."""
 
 Version 10:
-Incompatible with clients using versions 1-9
+Incompatible with clients using versions 0-9
 Changes:
     This version changes the interface to info. It removes the action info
     and licenses flag and replaces them with a set of options to allow
     the caller control over which information is retrieved.
 
 Version 9:
-Compatible with clients using versions 1-8
+Compatible with clients using versions 0-8
 Changes:
     This version adds an optional argument to plan_update_all to allow the
     specification of a name for the clone of the BE which is made. It also
     exposes check_be_name as part of the api.
 
 Version 8:
-Compatible with clients using versions 1-7
+Compatible with clients using versions 0-7
 Changes:
     This version introduces InvalidDepotResponseException.  The
     exception is thrown when operations that refresh the catalog
@@ -635,7 +687,7 @@
     should catch this exception and respond appropriately.
 
 Version 7:
-Compatible with clients using versions 1-6
+Compatible with clients using versions 0-6
 Changes:
     Ignore the pkg_client_name parameter passed to api.ImageInterface() if
     pkg.client.global_settings.client_name isn't None.  This latter object
@@ -643,14 +695,14 @@
     pkg_client_name parameter may be ignored or removed in the future.
 
 Version 6:
-Compatible with clients using versions 1-5
+Compatible with clients using versions 0-5
 Changes:
 Adds a new field to PackageInfo, category_info_list, which is a list of 
      PackageCategory objects. These objects contain the scheme and category
      information for packages.
 
 Version 5:
-Compatible with clients using versions 1-4 as long as they have a generic
+Compatible with clients using versions 0-4 as long as they have a generic
 APIException. This is the case for PackageManager and UpdateManager.
 Changes:
 plan_install and plan_update_all can now raise PermissionsException.
--- a/doc/on-disk-format.txt	Wed Feb 09 15:25:24 2011 -0800
+++ b/doc/on-disk-format.txt	Wed Feb 09 18:43:21 2011 -0800
@@ -126,8 +126,7 @@
 2086 validate that a repository is really a repository in pkg.depotd
 6335 publisher repo with invalid certificate information shouldn't
     prevent querying other repos
-6576 pkg install/image-update support for temporary publisher origins
-    desired
+6576 pkg install/update support for temporary publisher origins desired
 6940 depot support for file:// URI desired
 7213 ability to remove published packages
 7273 manifests should be arranged in a hierarchy by publisher
@@ -532,12 +531,22 @@
             will stand for 'pkg(5) package'.  The format of these
             archives matches that defined by IEEE Std 1003.1, 2004 for
             the pax Interchange Format, with the exception that the
-            first archive entry must not use the optional pax headers
-            allowed by the format, and must contain the index file
-            for the package archive.  The layout can be visualised as
-            follows:
+            first archive entry is tagged with an extended pax archive
+            header that specifies the archive version and the version
+            of the pkg(5) API that was used to write it.  In addition,
+            the file for the first archive entry must be the index
+            file for the package archive.  The layout can be
+            visualised as follows:
 
             .--------------------------------------------------------.
+            | ustar header for pax header global archive data        |
+            .--------------------------------------------------------.
+            | pax global extended header data for archive            |
+            .--------------------------------------------------------.
+            | ustar header for pax header for archive index file     |
+            .--------------------------------------------------------.
+            | pax extended header data for archive index file        |
+            .--------------------------------------------------------.
             | ustar header for package archive index file            |
             .--------------------------------------------------------.
             | file data for package archive index file               |
@@ -545,29 +554,41 @@
             | remaining archive data                                 |
             .________________________________________________________.
 
+            The archive and API version is stored in the header of the
+            index file instead of the global header for two reasons:
+            first, any keywords in the global header are treated as
+            though they apply to every entry in the archive, and
+            second, the pax specification states that global headers
+            should not be used with interchange media that could suffer
+            partial data loss during transport.  Since the archive
+            version primarily serves as a way for clients to reliably
+            determine whether a "standard" pax archive or one with an
+            index is being read, this approach seems reasonable.
+
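+            As a purely illustrative sketch (not part of this
+            proposal), a per-entry extended pax header carrying such
+            version keywords can be produced with Python's standard
+            tarfile module; the keyword names below are assumptions,
+            and the global header shown in the layout above is
+            omitted:
+
+                import tarfile
+
+                tar = tarfile.open("example.p5p", mode="w",
+                    format=tarfile.PAX_FORMAT)
+                ti = tar.gettarinfo("p5p.index.0.v0.gz")
+                # Hypothetical keywords for the archive version and
+                # the pkg(5) API version used to write the archive.
+                ti.pax_headers = {"archive.version": "0",
+                    "api.version": "53"}
+                tar.addfile(ti, open("p5p.index.0.v0.gz", "rb"))
+                tar.close()
+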
             The reason for this limitation is to ensure that clients
             performing selective archive extraction can be guaranteed
             to find the location and size of the package archive index
             file without knowing the size of the header for the index
-            file in advance (ustar headers are always 512 bytes in
-            size).
+            file in advance (ustar headers are always 512 bytes in
+            size, and each pax header data block in this layout
+            occupies a single 512-byte block, so clients can find the
+            archive index and/or identify the archive in the first
+            2048 bytes).
 
             In addition, pkg(5) archives in this format make remote,
             selective archive access possible.  For example, a client
-            could request the first 512 bytes of a pkg(5) archive file
-            from a remote repository, then retrieve the archive index
-            file.  Once it has the archive index file, it can then
-            perform a HTTP/1.1 byte-ranges request to selectively
-            transfer the data for a set of specific files from the
-            archive.  This convention also optimises access to the
-            archive for sources that are heavily biased towards
-            sequential reads.
+            could request the first 2048 bytes of a pkg(5) archive
+            file from a remote repository, identify the offset and
+            size of the index, and then retrieve it using an HTTP/1.1
+            byte-range request.  Once it has the archive index file,
+            it can then perform additional byte-range requests to
+            selectively transfer the data for a set of specific files
+            from the archive.  This
+            convention also optimises access to the archive for sources
+            that are heavily biased towards sequential reads.
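+
+            As a rough illustration of this access pattern (the URL
+            and the offsets below are placeholders), such requests
+            can be issued with Python's standard urllib2 module:
+
+                import urllib2
+
+                # Fetch the archive headers; the index entry's
+                # location and size can be recovered from the first
+                # 2048 bytes as described above.
+                req = urllib2.Request("http://example.org/foo.p5p")
+                req.add_header("Range", "bytes=0-2047")
+                head = urllib2.urlopen(req).read()
+
+                # Then fetch the index data itself, assuming its
+                # offset and size were parsed out of the headers.
+                req = urllib2.Request("http://example.org/foo.p5p")
+                req.add_header("Range", "bytes=2560-3105")
+                index_gz = urllib2.urlopen(req).read()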
 
             The index file must be named using the following template
             and be compressed using the gzip format described by RFCs
             1951 and 1952, and formatted according to section 4.3.8:
 
-                p5p.index.<index_file_number>.<index_version>.gz
+                p5p.index.<index_file_number>.v<index_version>.gz
 
                 <index_file_number> is an integer in string form that
                 indicates which index file this is.  The number only
@@ -579,27 +600,18 @@
                 indicates the version of the index file.  The initial
                 version for this proposal will be '0'.
 
-            If the first file in the archive is found not to be in the
-            layout or format shown above, or any of the index files in
-            the archive are found to not be in a format supported by
-            the client (version too old or too new), the archive will
-            be treated as a standard pax archive and some operations
-            may not be possible or experience degraded performance.
-            The same is also true if the index file is found to not
-            match the archive contents.
+            However, if the first file in the archive is found to not
+            use the layout or format shown above, or any of the index
+            files in the archive are not in a format supported by the
+            client (version too old or too new), the archive must be
+            treated as a standard pax archive and some operations may
+            not be possible or experience degraded performance.  The
+            same is also true if the index file is found to not match
+            the archive contents.
 
-            When creating the archive, or adding to an existing archive,
-            new index gzip files should be zero-padded with an extra 256
-            bytes at the end.  This reserved space is used for fast
-            additions to existing package archives by updating the
-            previous index file with an entry for the new index file.
-            For example, the first index file's last entry should
-            contain the name and offset of the second index file,
-            and so on.
-
-            All pathnames after the first in the archive (if the first
-            file is the archive index file) must conform to the repo-
-            sitory layout specified in section 4.2.2 of this proposal.
+            All entries in the archive (excluding any archive index
+            files) must conform to the repository layout specified in
+            section 4.2.2 of this proposal.
 
             Since a pkg(5) repository can contain one or more packages,
             pkg(5) archive files can also contain the data for one or
@@ -618,7 +630,8 @@
             following format (also referred to as index format version
             0):
 
-                <name>NUL<offset>NUL<entry_size>NUL<size>NUL<typeflag>NL
+                <name>NUL<offset>NUL<entry_size>NUL<size>NUL<typeflag>
+                NULNL
 
                 <name> is a string containing the pathname of the file
                 in the archive using only ascii characters.  It can be
@@ -651,9 +664,9 @@
 
             An example set of entries would appear as follows:
 
-                pkg5.repositoryNUL0NUL546NUL2560NUL0
-                pkgNUL2560NUL0NUL1536NUL5
-                pkg/service%2Ffault-managementNUL4096NUL0NUL1536NUL5
+                pkg5.repositoryNUL0NUL546NUL2560NUL0NUL
+                pkgNUL2560NUL0NUL1536NUL5NUL
+                pkg/service%2Ffault-managementNUL4096NUL0NUL1536NUL5NUL
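+
+            To make the version 0 grammar concrete, entries such as
+            the above can be parsed with a few lines of Python (an
+            illustrative sketch only; error handling is omitted):
+
+                def parse_index_line(line):
+                    # Each entry is NUL-separated and ends with a NUL
+                    # followed by a newline; drop the newline, then
+                    # the empty field left by the trailing NUL.
+                    fields = line.rstrip("\n").split("\0")[:-1]
+                    name, offset, entry_size, size, typeflag = fields
+                    return (name, int(offset), int(entry_size),
+                        int(size), typeflag)
+
+                # parse_index_line(
+                #     "pkg5.repository\x000\x00546\x002560\x000\x00\n")
+                # returns ("pkg5.repository", 0, 546, 2560, "0")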
 
             It should be noted that other possible formats were
             evaluated for the index file, including those based
@@ -666,7 +679,11 @@
             - no streaming support (required entire index file be
               loaded into memory)
 
-            - significantly greater parsing times
+            - significantly greater parsing times using currently
+              available Python libraries
+
+            - required developing an envelope format that could
+              contain the encoded data
 
 5. Proposed Changes:
 
@@ -907,8 +924,8 @@
         data.
 
         To support the specification of temporary origins, the install
-        and image-update subcommands will be modified by adding a '-g'
-        option to specify additional temporary package origin URIs or
+        and update subcommands will be modified by adding a '-g' option
+        to specify additional temporary package origin URIs or
         the path to a pkg(5) archive file or pkg(5) info file.  The
         '-g' option may be specified multiple times.  As an example:
 
@@ -917,11 +934,9 @@
                 -g file:/path/to/bar.p5p \
                 foo bar localpkg
 
-            $ pkg image-update -g /path/to/foo.p5p
-
         pkg(5) archive files used as a source of package data during an
-        install or image-update operation will have their content cached
-        by the client before the operation begins.  Any publishers found
+        install or update operation will have their content cached by
+        the client before the operation begins.  Any publishers found
         in the archive will be temporarily added to the image if they do
         not already exist.  Publishers that were temporarily added but
         not used during the operation will be removed after operation
--- a/src/brand/attach	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/brand/attach	Wed Feb 09 18:43:21 2011 -0800
@@ -177,7 +177,11 @@
 		if [[ -z $origin ]]; then
 			origin=$name
 			name=${publisher.name}
+		elif [[ "$origin" == "None" ]]; then
+			# Publisher with no origins.
+			origin=""
 		fi
+
 		# Use a compound variable to store all the data
 		# relating to a publisher.
 		if [[ -z ${publishers[$name]} ]]; then
@@ -195,14 +199,22 @@
 			get_publisher_attrs ${publisher.name} origin | \
 			    IFS=" " read publisher.sticky publisher.preferred \
 			    publisher.enabled
-			get_pub_secinfo ${publisher.name} | \
-			    read publisher.keyfile publisher.certfile
-			[[ ${publisher.keyfile} != "None" && \
-			    ! -f ${PKG_IMAGE}/${publisher.keyfile} ]] && \
-			    fail_usage "$f_nosuch_key" ${publisher.keyfile}
-			[[ ${publisher.certfile} != "None" && \
-			    ! -f ${PKG_IMAGE}/${publisher.certfile} ]] && \
-			    fail_usage "$f_nosuch_cert" ${publisher.certfile}
+			if [[ -n "$origin" ]]; then
+				get_pub_secinfo ${publisher.name} | \
+				    read publisher.keyfile publisher.certfile
+				[[ ${publisher.keyfile} != "None" && \
+				    ! -f ${PKG_IMAGE}/${publisher.keyfile} ]] && \
+				    fail_usage "$f_nosuch_key" \
+				        ${publisher.keyfile}
+				[[ ${publisher.certfile} != "None" && \
+				    ! -f ${PKG_IMAGE}/${publisher.certfile} ]] && \
+				    fail_usage "$f_nosuch_cert" \
+				        ${publisher.certfile}
+			else
+				# Publisher has no origins.
+				publisher.keyfile="None"
+				publisher.certfile="None"
+			fi
 			publisher_count=publisher_count+1
 			url_count=0
 		fi
--- a/src/brand/common.ksh	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/brand/common.ksh	Wed Feb 09 18:43:21 2011 -0800
@@ -362,7 +362,9 @@
 	typeset utype=$2
 
 	LC_ALL=C $PKG publisher -HF tsv| \
-	    nawk '$5 == "'"$utype"'" && $1 == "'"$pname"'" \
+	    nawk '($5 == "'"$utype"'" || \
+	    ("'"$utype"'" == "origin" && $5 == "")) \
+	    && $1 == "'"$pname"'" \
 	    {printf "%s %s %s\n", $2, $3, $4;}'
 	return 0
 }
@@ -427,11 +429,17 @@
 	fi
 
 	LC_ALL=C $PKG publisher -HF tsv | \
-		nawk '$5 == "'"$utype"'" && \
-		$6 == "online" && \
+		nawk '($5 == "'"$utype"'" || \
+		("'"$utype"'" == "origin" && $5 == "")) && \
 		( "'"$ptype_filter"'" == "" || $3 == "'"$ptype_filter"'" ) \
 		{printf "%s %s\n", $1, $7;}' |
 		while IFS=" " read __publisher __publisher_url; do
+			if [[ "$utype" == "origin" && \
+			    -z "$__publisher_url" ]]; then
+				# Publisher without origins.
+				__publisher_url="None"
+			fi
+
 			if [[ -n "$__pub_prefix" && \
 				"$__pub_prefix" != "$__publisher" ]]; then
 				# Different publisher so emit accumulation and
--- a/src/brand/pkgcreatezone	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/brand/pkgcreatezone	Wed Feb 09 18:43:21 2011 -0800
@@ -220,9 +220,13 @@
 	# mirror level, then this will have to be changed.
 	get_publisher_urls non-preferred origin | \
 	    while IFS="=" read pub pub_urls; do
-		# skip extra publishers that need a key/cert
-		[[ "`get_pub_secinfo $pub`" != "None None" ]] && \
-		    continue
+		if [[ "$pub_urls" != "None" ]]; then
+			# skip extra publishers that need a key/cert
+			[[ "`get_pub_secinfo $pub`" != "None None" ]] && \
+			    continue
+		else
+			pub_urls=""
+		fi
 
 		if [[ -z "$publishers_extra_origins" ]]; then
 			publishers_extra_origins="$pub=$pub_urls"
--- a/src/client.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/client.py	Wed Feb 09 18:43:21 2011 -0800
@@ -86,7 +86,7 @@
         import sys
         sys.exit(1)
 
-CLIENT_API_VERSION = 52
+CLIENT_API_VERSION = 53
 PKG_CLIENT_NAME = "pkg"
 
 JUST_UNKNOWN = 0
@@ -152,47 +152,49 @@
         basic_usage = {}
         adv_usage = {}
 
-        basic_cmds = ["install", "uninstall", "update", "list",
-            "refresh", "version"]
+        basic_cmds = ["refresh", "install", "uninstall", "update", "list",
+            "version"]
 
         basic_usage["install"] = _(
-            "[-nvq] [--accept] [--licenses] [--no-index] [--no-refresh]\n"
-            "            [--deny-new-be | --require-new-be] [--be-name name]\n"
-            "            [--reject pkg_fmri_patter ... ] pkg_fmri_pattern ...")
+            "[-nvq] [-g path_or_uri ...] [--accept] [--licenses]\n"
+            "            [--no-index] [--no-refresh] [--deny-new-be | --require-new-be]\n"
+            "            [--be-name name] [--reject pkg_fmri_pattern ... ]\n"
+            "            pkg_fmri_pattern ...")
         basic_usage["uninstall"] = _(
             "[-nrvq] [--no-index] [--deny-new-be | --require-new-be]\n"
             "            [--be-name name] pkg_fmri_pattern ...")
         basic_usage["update"] = _(
-            "[-fnvq] [--accept] [--be-name name] [--licenses]\n"
-            "            [--deny-new-be | --require-new-be] [--no-index]\n"
-            "            [--no-refresh] [--reject pkg_fmri_pattern ...]"
+            "[-fnvq] [-g path_or_uri ...] [--accept] [--licenses]\n"
+            "            [--no-index] [--no-refresh] [--deny-new-be | --require-new-be]\n"
+            "            [--be-name name] [--reject pkg_fmri_pattern ...]\n"
             "            [pkg_fmri_pattern ...]")
         basic_usage["list"] = _(
-            "[-Hafnsuv] [--no-refresh] [pkg_fmri_pattern ...]")
+            "[-Hafnsuv] [-g path_or_uri ...] [--no-refresh]\n"
+            "            [pkg_fmri_pattern ...]")
         basic_usage["refresh"] = _("[--full] [publisher ...]")
         basic_usage["version"] = ""
 
-        advanced_cmds = ["info", "search", "verify", "fix", "revert",
-            "contents", "image-create", "change-variant", "change-facet",
-            "variant", "facet", "set-property", "add-property-value",
-            "remove-property-value", "unset-property", "property", "",
-            "set-publisher", "unset-publisher", "publisher", "history",
-            "purge-history", "rebuild-index"]
-
-        adv_usage["revert"] = _(
-            "[-nv] [--be-name name] [--deny-new-be | --require-new-be]\n"
-            "            (--tagged tag-name ... | path-to-file ...)")
-
-        adv_usage["info"] = _("[-lr] [--license] [pkg_fmri_pattern ...]")
+        advanced_cmds = ["info", "contents", "search", "", "verify", "fix",
+            "revert", "", "variant", "change-variant", "", "facet",
+            "change-facet", "", "property", "set-property",
+            "add-property-value", "remove-property-value", "unset-property", "",
+            "publisher", "set-publisher", "unset-publisher", "", "history",
+            "purge-history", "", "rebuild-index", "update-format"]
+
+        adv_usage["info"] = \
+            _("[-lr] [-g path_or_uri ...] [--license] [pkg_fmri_pattern ...]")
+        adv_usage["contents"] = _(
+            "[-Hmr] [-a attribute=pattern ...] [-g path_or_uri ...]\n"
+            "            [-o attribute ...] [-s sort_key] [-t action_type ...]\n"
+            "            [pkg_fmri_pattern ...]")
         adv_usage["search"] = _(
             "[-HIaflpr] [-o attribute ...] [-s repo_uri] query")
 
         adv_usage["verify"] = _("[-Hqv] [pkg_fmri_pattern ...]")
         adv_usage["fix"] = _("[--accept] [--licenses] [pkg_fmri_pattern ...]")
-
-        adv_usage["contents"] = _(
-            "[-Hmr] [-a attribute=pattern ...] [-o attribute ...]\n"
-            "            [-s sort_key] [-t action_type ...] [pkg_fmri_pattern ...]")
+        adv_usage["revert"] = _(
+            "[-nv] [--be-name name] [--deny-new-be | --require-new-be]\n"
+            "            (--tagged tag-name ... | path-to-file ...)")
 
         adv_usage["image-create"] = _(
             "[-FPUfz] [--force] [--full|--partial|--user] [--zone]\n"
@@ -202,12 +204,14 @@
             "            [--facet <facet_spec>=[True|False] ...]\n"
             "            (-p|--publisher) [<name>=]<repo_uri> dir")
         adv_usage["change-variant"] = _(
-            "[-nvq] [--accept] [--be-name name] [--licenses]\n"
-            "            [--deny-new-be | --require-new-be] <variant_spec>=<instance> ...")
+            "[-nvq] [-g path_or_uri ...] [--accept] [--licenses]\n"
+            "            [--deny-new-be | --require-new-be] [--be-name name]\n"
+            "            <variant_spec>=<instance> ...")
 
         adv_usage["change-facet"] = _(
-            "[-nvq] [--accept] [--be-name name] [--licenses]\n"
-            "            [--deny-new-be | --require-new-be] <facet_spec>=[True|False|None] ...")
+            "[-nvq] [-g path_or_uri ...] [--accept] [--licenses]\n"
+            "            [--deny-new-be | --require-new-be] [--be-name name]\n"
+            "            <facet_spec>=[True|False|None] ...")
 
         adv_usage["variant"] = _("[-H] [<variant_spec>]")
         adv_usage["facet"] = ("[-H] [<facet_spec>]")
@@ -318,9 +322,10 @@
 def list_inventory(api_inst, args):
         """List packages."""
 
-        opts, pargs = getopt.getopt(args, "Hafnsuv", ["no-refresh"])
+        opts, pargs = getopt.getopt(args, "Hafg:nsuv", ["no-refresh"])
 
         display_headers = True
+        origins = set()
         refresh_catalogs = True
         pkg_list = api.ImageInterface.LIST_INSTALLED
         summary = False
@@ -336,6 +341,8 @@
                 elif opt == "-f":
                         ltypes.add(opt)
                         variants = True
+                elif opt == "-g":
+                        origins.add(misc.parse_uri(arg, cwd=orig_cwd))
                 elif opt == "-n":
                         ltypes.add(opt)
                 elif opt == "-s":
@@ -353,6 +360,10 @@
             ("-n", ("-s", "-v")),
         ]
 
+        if origins and "-n" not in ltypes:
+                # Use of -g implies -a unless -n is provided.
+                ltypes.add("-a")
+
         if "-f" in ltypes and "-a" not in ltypes:
                 usage(_("-f may only be used in combination with -a"),
                     cmd="list")
@@ -439,7 +450,7 @@
         ppub = api_inst.get_preferred_publisher().prefix
         try:
                 res = api_inst.get_pkg_list(pkg_list, patterns=pargs,
-                    raise_unmatched=True, variants=variants)
+                    raise_unmatched=True, repos=origins, variants=variants)
                 for pt, summ, cats, states in res:
                         found = True
                         if display_headers:
@@ -1231,15 +1242,18 @@
         the image contents as necessary."""
 
         op = "change-variant"
-        opts, pargs = getopt.getopt(args, "nvq", ["accept", "be-name=",
+        opts, pargs = getopt.getopt(args, "g:nvq", ["accept", "be-name=",
             "licenses", "deny-new-be", "require-new-be"])
 
         accept = quiet = noexecute = show_licenses = False
+        origins = set()
         verbose = 0
         be_name = None
         new_be = None
         for opt, arg in opts:
-                if opt == "-n":
+                if opt == "-g":
+                        origins.add(misc.parse_uri(arg, cwd=orig_cwd))
+                elif opt == "-n":
                         noexecute = True
                 elif opt == "-v":
                         verbose = verbose + 1
@@ -1286,7 +1300,7 @@
         try:
                 stuff_to_do = api_inst.plan_change_varcets(variants,
                     facets=None, noexecute=noexecute, be_name=be_name,
-                    new_be=new_be)
+                    new_be=new_be, repos=origins)
         except:
                 ret_code = __api_plan_exception(op, api_inst, noexecute,
                     verbose)
@@ -1318,15 +1332,18 @@
         image as necessary"""
 
         op = "change-facet"
-        opts, pargs = getopt.getopt(args, "nvq", ["accept", "be-name=",
+        opts, pargs = getopt.getopt(args, "g:nvq", ["accept", "be-name=",
             "licenses", "deny-new-be", "require-new-be"])
 
         accept = quiet = noexecute = show_licenses = False
+        origins = set()
         verbose = 0
         be_name = None
         new_be = None
         for opt, arg in opts:
-                if opt == "-n":
+                if opt == "-g":
+                        origins.add(misc.parse_uri(arg, cwd=orig_cwd))
+                elif opt == "-n":
                         noexecute = True
                 elif opt == "-v":
                         verbose = verbose + 1
@@ -1388,7 +1405,7 @@
         try:
                 stuff_to_do = api_inst.plan_change_varcets(variants=None,
                     facets=facets, noexecute=noexecute, be_name=be_name,
-                    new_be=new_be)
+                    new_be=new_be, repos=origins)
         except:
                 ret_code = __api_plan_exception(op, api_inst, noexecute,
                     verbose)
@@ -1419,21 +1436,23 @@
         """Attempt to take package specified to INSTALLED state.  The operands
         are interpreted as glob patterns."""
 
-        # XXX Publisher-catalog issues.
         op = "install"
-        opts, pargs = getopt.getopt(args, "nvq", ["accept", "licenses",
+        opts, pargs = getopt.getopt(args, "g:nvq", ["accept", "licenses",
             "no-refresh", "no-index", "deny-new-be", "require-new-be",
             "be-name=", "reject="])
 
         accept = quiet = noexecute = show_licenses = False
         verbose = 0
+        origins = set()
         refresh_catalogs = update_index = True
         new_be = None
         be_name = None
         reject_pats = []
 
         for opt, arg in opts:
-                if opt == "-n":
+                if opt == "-g":
+                        origins.add(misc.parse_uri(arg, cwd=orig_cwd))
+                elif opt == "-n":
                         noexecute = True
                 elif opt == "-v":
                         verbose = verbose + 1
@@ -1478,7 +1497,8 @@
         try:
                 stuff_to_do = api_inst.plan_install(pargs,
                     refresh_catalogs, noexecute, update_index=update_index,
-                    be_name=be_name, new_be=new_be, reject_list=reject_pats)
+                    be_name=be_name, new_be=new_be, reject_list=reject_pats,
+                    repos=origins)
         except Exception, e:
                 ret_code = __api_plan_exception(op, api_inst, noexecute,
                     verbose)
@@ -1586,19 +1606,23 @@
         The operands are interpreted as glob patterns."""
 
         op = "update"
-        opts, pargs = getopt.getopt(args, "fnvq", ["accept", "be-name=", "reject=",
-            "licenses", "no-refresh", "no-index", "deny-new-be", "require-new-be"])
+        opts, pargs = getopt.getopt(args, "fg:nvq", ["accept", "be-name=",
+            "reject=", "licenses", "no-refresh", "no-index", "deny-new-be",
+            "require-new-be"])
 
         accept = force = quiet = noexecute = show_licenses = False
         verbose = 0
         refresh_catalogs = update_index = True
         be_name = None
         new_be = None
+        origins = set()
         reject_pats = []
 
         for opt, arg in opts:
                 if opt == "-f":
                         force = True
+                elif opt == "-g":
+                        origins.add(misc.parse_uri(arg, cwd=orig_cwd))
                 elif opt == "-n":
                         noexecute = True
                 elif opt == "-v":
@@ -1645,19 +1669,22 @@
                         # allowed by the patterns specified.  (The versions
                         # specified can be older than what is installed.)
                         stuff_to_do = api_inst.plan_update(pargs,
-                            refresh_catalogs=refresh_catalogs, noexecute=noexecute,
-                            be_name=be_name, new_be=new_be, update_index=update_index,
-                            reject_list=reject_pats)
+                            refresh_catalogs=refresh_catalogs,
+                            noexecute=noexecute, be_name=be_name, new_be=new_be,
+                            update_index=update_index, reject_list=reject_pats,
+                            repos=origins)
                 else:
                         # If no packages were specified, or '*' was one of
                         # the patterns provided, attempt to update all
                         # installed packages.
                         stuff_to_do, opensolaris_image = \
                             api_inst.plan_update_all(
-                                refresh_catalogs=refresh_catalogs, noexecute=noexecute,
-                                be_name=be_name, new_be=new_be, force=force,
+                                refresh_catalogs=refresh_catalogs,
+                                noexecute=noexecute, be_name=be_name,
+                                new_be=new_be, force=force,
                                 update_index=update_index,
-                                reject_list=reject_pats)
+                                reject_list=reject_pats,
+                                repos=origins)
         except Exception, e:
                 ret_code = __api_plan_exception(op, api_inst, noexecute,
                     verbose)
@@ -1889,15 +1916,9 @@
                 elif opt == "-r":
                         remote = True
                 elif opt == "-s":
-                        if not misc.valid_pub_url(arg):
-                                orig_arg = arg
-                                arg = "http://" + arg
-                                if not misc.valid_pub_url(arg):
-                                        error(_("%s is not a valid "
-                                            "repository URL.") % orig_arg)
-                                        return EXIT_OOPS
                         remote = True
-                        servers.append({"origin": arg})
+                        servers.append({
+                            "origin": misc.parse_uri(arg, cwd=orig_cwd) })
                 elif opt == "-I":
                         case_sensitive = True
 
@@ -2075,10 +2096,14 @@
         display_license = False
         info_local = False
         info_remote = False
-
-        opts, pargs = getopt.getopt(args, "lr", ["license"])
+        origins = set()
+
+        opts, pargs = getopt.getopt(args, "g:lr", ["license"])
         for opt, arg in opts:
-                if opt == "-l":
+                if opt == "-g":
+                        origins.add(misc.parse_uri(arg, cwd=orig_cwd))
+                        info_remote = True
+                elif opt == "-l":
                         info_local = True
                 elif opt == "-r":
                         info_remote = True
@@ -2106,20 +2131,17 @@
         info_needed |= frozenset([api.PackageInfo.DEPENDENCIES])
 
         try:
-                ret = api_inst.info(pargs, info_local, info_needed)
+                ret = api_inst.info(pargs, info_local, info_needed,
+                    repos=origins)
         except api_errors.ImageFormatUpdateNeeded, e:
                 format_update_error(e)
                 return EXIT_OOPS
-        except (api_errors.InvalidPackageErrors,
-            api_errors.ActionExecutionError,
-            api_errors.UnrecognizedOptionsToInfo,
-            api_errors.UnknownErrors,
-            api_errors.PermissionsException), e:
-                error(e)
-                return EXIT_OOPS
         except api_errors.NoPackagesInstalledException:
                 error(_("no packages installed"))
                 return EXIT_OOPS
+        except api_errors.ApiException, e:
+                error(e)
+                return EXIT_OOPS
 
         pis = ret[api.ImageInterface.INFO_FOUND]
         notfound = ret[api.ImageInterface.INFO_MISSING]
@@ -2479,14 +2501,12 @@
         attributes to display; with -s, specify attributes to sort on; with -t,
         specify which action types to list."""
 
-        # XXX Need remote-info option, to request equivalent information
-        # from repository.
-
-        opts, pargs = getopt.getopt(args, "Ha:o:s:t:mfr")
+        opts, pargs = getopt.getopt(args, "Ha:g:o:s:t:mfr")
 
         subcommand = "contents"
         display_headers = True
         display_raw = False
+        origins = set()
         output_fields = False
         remote = False
         local = False
@@ -2504,6 +2524,8 @@
                                 usage(_("-a takes an argument of the form "
                                     "<attribute>=<pattern>"), cmd=subcommand)
                         attr_match.setdefault(attr, []).append(match)
+                elif opt == "-g":
+                        origins.add(misc.parse_uri(arg, cwd=orig_cwd))
                 elif opt == "-o":
                         output_fields = True
                         attrs.extend(arg.split(","))
@@ -2516,10 +2538,10 @@
                 elif opt == "-m":
                         display_raw = True
 
-        if not remote and not local:
+        if origins:
+                remote = True
+        elif not remote:
                 local = True
-        elif local and remote:
-                usage(_("-l and -r may not be combined"), cmd=subcommand)
 
         if remote and not pargs:
                 usage(_("contents: must request remote contents for specific "
@@ -2601,12 +2623,13 @@
         notfound = EmptyI
         try:
                 res = api_inst.get_pkg_list(pkg_list, patterns=pargs,
-                    raise_unmatched=True, return_fmris=True, variants=True)
+                    raise_unmatched=True, return_fmris=True, variants=True,
+                    repos=origins)
                 manifests = []
 
                 for pfmri, summ, cats, states in res:
                         manifests.append(api_inst.get_manifest(pfmri,
-                            all_variants=display_raw))
+                            all_variants=display_raw, repos=origins))
         except api_errors.ImageFormatUpdateNeeded, e:
                 format_update_error(e)
                 return EXIT_OOPS
@@ -2867,10 +2890,9 @@
         unset_props = set()
 
         opts, pargs = getopt.getopt(args, "Pedk:c:O:G:g:M:m:p:",
-            ["add-mirror=", "remove-mirror=", "add-origin=",
-            "remove-origin=", "no-refresh", "reset-uuid",
-            "enable", "disable", "sticky", "non-sticky", "search-before=",
-            "search-after=", "approve-ca-cert=",
+            ["add-mirror=", "remove-mirror=", "add-origin=", "remove-origin=",
+            "no-refresh", "reset-uuid", "enable", "disable", "sticky",
+            "non-sticky", "search-before=", "search-after=", "approve-ca-cert=",
             "revoke-ca-cert=", "unset-ca-cert=", "set-property=",
             "add-property-value=", "remove-property-value=", "unset-property="])
 
@@ -2882,19 +2904,19 @@
                 elif opt == "-e" or opt == "--enable":
                         disable = False
                 elif opt == "-g" or opt == "--add-origin":
-                        add_origins.add(arg)
+                        add_origins.add(misc.parse_uri(arg, cwd=orig_cwd))
                 elif opt == "-G" or opt == "--remove-origin":
-                        remove_origins.add(arg)
+                        remove_origins.add(misc.parse_uri(arg, cwd=orig_cwd))
                 elif opt == "-k":
                         ssl_key = arg
                 elif opt == "-O":
                         origin_uri = arg
                 elif opt == "-m" or opt == "--add-mirror":
-                        add_mirrors.add(arg)
+                        add_mirrors.add(misc.parse_uri(arg, cwd=orig_cwd))
                 elif opt == "-M" or opt == "--remove-mirror":
-                        remove_mirrors.add(arg)
+                        remove_mirrors.add(misc.parse_uri(arg, cwd=orig_cwd))
                 elif opt == "-p":
-                        repo_uri = arg
+                        repo_uri = misc.parse_uri(arg, cwd=orig_cwd)
                 elif opt == "-P":
                         preferred = True
                 elif opt == "--reset-uuid":
@@ -2949,8 +2971,8 @@
         if len(pargs) == 0 and not repo_uri:
                 usage(_("requires a publisher name"), cmd="set-publisher")
         elif len(pargs) > 1:
-                usage( _("only one publisher name may be specified"),
-                    cmd="set-publisher",)
+                usage(_("only one publisher name may be specified"),
+                    cmd="set-publisher")
         elif pargs:
                 name = pargs[0]
 
@@ -2963,8 +2985,8 @@
                     "options may not be combined"), cmd="set-publisher")
 
         if search_before and search_after:
-                usage(_("search_before and search_after may not be combined"),
-                      cmd="set-publisher")
+                usage(_("--search-before and --search-after may not be "
+                    "combined"), cmd="set-publisher")
 
         if repo_uri and (add_origins or add_mirrors or remove_origins or
             remove_mirrors or disable != None or not refresh_allowed or
@@ -3089,7 +3111,8 @@
                             duplicate=True)
                         dest_repo = dest_pub.selected_repository
 
-                        if not dest_repo.has_origin(repo_uri):
+                        if dest_repo.origins and \
+                            not dest_repo.has_origin(repo_uri):
                                 failed.append((prefix, _("""\
     The specified repository location is not a known source of publisher
     configuration updates for '%s'.
@@ -3105,8 +3128,11 @@
                                 # assumed to match the URI that the user
                                 # provided.  Since this is an update case,
                                 # nothing special needs to be done.
+                                if not dest_repo.origins:
+                                        add_origins = [repo_uri]
+                                else:
+                                        add_origins = []
                                 add_mirrors = []
-                                add_origins = []
                         else:
                                 # Avoid duplicates by adding only those mirrors
                                 # or origins not already known.
@@ -3204,11 +3230,12 @@
                         if reset_uuid:
                                 pub.reset_client_uuid()
                         repo = pub.selected_repository
-                except api_errors.UnknownPublisher:
-                        if not origin_uri and not add_origins:
-                                return EXIT_OOPS, _("publisher does not exist. "
-                                    "Use -g to define origin URI for new "
-                                    "publisher.")
+                except api_errors.UnknownPublisher, e:
+                        if not origin_uri and not add_origins and \
+                            (remove_origins or remove_mirrors or
+                            remove_prop_values or add_mirrors):
+                                return EXIT_OOPS, str(e)
+
                         # No pre-existing, so create a new one.
                         repo = publisher.Repository()
                         pub = publisher.Publisher(prefix, repositories=[repo])
@@ -3473,7 +3500,6 @@
 
         retcode = EXIT_OK
         if len(pargs) == 0:
-
                 pref_pub = api_inst.get_preferred_publisher()
                 if preferred_only:
                         pubs = [pref_pub]
@@ -3488,10 +3514,10 @@
                         so = api_inst.get_pub_search_order()
                         pub_dict = dict([(p.prefix, p) for p in pubs])
                         pubs = [
-                                pub_dict[name]
-                                for name in so
-                                if name in pub_dict
-                                ]
+                            pub_dict[name]
+                            for name in so
+                            if name in pub_dict
+                        ]
                 # Create a formatting string for the default output
                 # format
                 if format == "default":
@@ -3564,9 +3590,11 @@
                                 set_value(field_data["status"], _("online"))
                                 set_value(field_data["uri"], str(uri))
                                 values = map(get_value,
-                                             sorted(filter(filter_func,
-                                             field_data.values()), sort_fields))
+                                    sorted(filter(filter_func,
+                                    field_data.values()), sort_fields)
+                                )
                                 msg(fmt % tuple(values))
+
                         # Update field_data for each mirror and output
                         # a publisher record in our desired format.
                         for uri in r.mirrors:
@@ -3575,9 +3603,21 @@
                                 set_value(field_data["status"], _("online"))
                                 set_value(field_data["uri"], str(uri))
                                 values = map(get_value,
-                                             sorted(filter(filter_func,
-                                             field_data.values()), sort_fields))
+                                    sorted(filter(filter_func,
+                                    field_data.values()), sort_fields)
+                                )
                                 msg(fmt % tuple(values))
+
+                        if not r.origins and not r.mirrors:
+                                set_value(field_data["type"], "")
+                                set_value(field_data["status"], "")
+                                set_value(field_data["uri"], "")
+                                values = map(get_value,
+                                    sorted(filter(filter_func,
+                                    field_data.values()), sort_fields)
+                                )
+                                msg(fmt % tuple(values))
+
         else:
                 def display_ssl_info(uri):
                         retcode = EXIT_OK
@@ -4532,8 +4572,8 @@
                                         key, value = arg.split("=", 1)
                                 except (AttributeError, ValueError):
                                         usage(_("%(opt)s takes argument of form "
-                                            "name=value, not %(arg)s") % { "opt":  opt,
-                                            "arg": arg })
+                                            "name=value, not %(arg)s") % {
+                                            "opt":  opt, "arg": arg })
                         DebugValues.set_value(key, value)
                 elif opt == "-R":
                         mydir = arg
--- a/src/gui/modules/misc_non_gui.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/gui/modules/misc_non_gui.py	Wed Feb 09 18:43:21 2011 -0800
@@ -40,7 +40,7 @@
 
 # The current version of the Client API the PM, UM and
 # WebInstall GUIs have been tested against and are known to work with.
-CLIENT_API_VERSION = 52
+CLIENT_API_VERSION = 53
 LOG_DIR = "/var/tmp"
 LOG_ERROR_EXT = "_error.log"
 LOG_INFO_EXT = "_info.log"
--- a/src/man/pkg.1.txt	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/man/pkg.1.txt	Wed Feb 09 18:43:21 2011 -0800
@@ -7,25 +7,28 @@
 SYNOPSIS
      /usr/bin/pkg [options] command [cmd_options] [operands]
 
-     /usr/bin/pkg install [-nvq] [--accept] [--licenses] [--no-index]
-         [--no-refresh] [--deny-new-be | --require-new-be] [--be-name name]
-         [--reject pkg_fmri_pattern] pkg_fmri_pattern ...
+     /usr/bin/pkg refresh [--full] [publisher ...]
+
+     /usr/bin/pkg install [-nvq] [-g path_or_uri ...] [--accept] [--licenses]
+         [--no-index] [--no-refresh] [--deny-new-be | --require-new-be]
+         [--be-name name] [--reject pkg_fmri_pattern ...] pkg_fmri_pattern ...
 
      /usr/bin/pkg uninstall [-nrvq] [--no-index]
          [--deny-new-be | --require-new-be] [--be-name name]
          pkg_fmri_pattern ...
 
-     /usr/bin/pkg update [-fnvq] [--accept] [--be-name name]
-         [--deny-new-be | --require-new-be] [--licenses] [--no-index]
-         [--no-refresh] [--reject pkg_fmri_pattern] [pkg_fmri_pattern ...]
-
-     /usr/bin/pkg refresh [--full] [publisher ...]
+     /usr/bin/pkg update [-fnvq] [-g path_or_uri ...] [--accept] [--licenses]
+         [--no-index] [--no-refresh] [--deny-new-be | --require-new-be]
+         [--be-name name] [--reject pkg_fmri_pattern ...]
+         [pkg_fmri_pattern ...]
 
-     /usr/bin/pkg contents [-Hmr] [-a attribute=pattern ...]
-         [-o attribute ...] [-s sort_key] [-t action_type ...]
+     /usr/bin/pkg list [-Hafnsuv] [-g path_or_uri ...] [--no-refresh]
+         [pkg_fmri_pattern ...]
+     /usr/bin/pkg info [-lr] [-g path_or_uri ...] [--license]
          [pkg_fmri_pattern ...]
-     /usr/bin/pkg info [-lr] [--license] [pkg_fmri_pattern ...]
-     /usr/bin/pkg list [-Hafnsuv] [--no-refresh] [pkg_fmri_pattern ...]
+     /usr/bin/pkg contents [-Hmr] [-a attribute=pattern ...]
+         [-g path_or_uri ...] [-o attribute ...] [-s sort_key]
+         [-t action_type ...] [pkg_fmri_pattern ...]
      /usr/bin/pkg search [-HIaflpr] [-o attribute ...] [-s repo_uri]
          query
 
@@ -34,29 +37,23 @@
      /usr/bin/pkg revert [-nv] [--be-name name] [--deny-new-be | --require-new-be]
          (--tagged tag-name ... | path-to-file ...)
 
-     /usr/bin/pkg image-create [-FPUfz] [--force]
-         [--full|--partial|--user] [--zone] [-k ssl_key] [-c ssl_cert]
-         [--no-refresh] [--variant <variant_spec>=<instance> ...]
-         [-g uri|--origin=uri ...] [-m uri|--mirror=uri ...]
-         [--facet <facet_spec>=[True|False] ...]
-         (-p|--publisher) [<name>=]<repo_uri> dir
-
      /usr/bin/pkg variant [-H] [<variant_spec>]
-     /usr/bin/pkg change-variant [-nvq] [--accept]
-         [--deny-new-be | --require-new-be] [--be-name name]
-         [--licenses] <variant_spec>=<instance> ...
+     /usr/bin/pkg change-variant [-nvq] [-g path_or_uri ...] [--accept]
+         [--licenses] [--deny-new-be | --require-new-be] [--be-name name]
+         <variant_spec>=<instance> ...
 
      /usr/bin/pkg facet [-H] [<facet_spec>]
-     /usr/bin/pkg change-facet [-nvq] [--accept] [--be-name name]
-         [--deny-new-be | --require-new-be]
-         [--licenses] <facet_spec>=[True|False|None] ...
+     /usr/bin/pkg change-facet [-nvq] [-g path_or_uri ...] [--accept]
+         [--licenses] [--deny-new-be | --require-new-be] [--be-name name]
+         <facet_spec>=[True|False|None] ...
 
+     /usr/bin/pkg property [-H] [propname ...]
      /usr/bin/pkg set-property propname propvalue
      /usr/bin/pkg add-property-value propname propvalue
      /usr/bin/pkg remove-property-value propname propvalue
      /usr/bin/pkg unset-property propname ...
-     /usr/bin/pkg property [-H] [propname ...]
 
+     /usr/bin/pkg publisher [-HPn] [publisher ...]
      /usr/bin/pkg set-publisher [-Ped] [-k ssl_key] [-c ssl_cert]
          [-g origin_to_add|--add-origin=origin_to_add ...]
          [-G origin_to_remove|--remove-origin=origin_to_remove ...]
@@ -74,9 +71,9 @@
          [--unset-property name_of_property_to_delete]
          [publisher]
      /usr/bin/pkg unset-publisher publisher ...
-     /usr/bin/pkg publisher [-HPn] [publisher ...]
 
-     /usr/bin/pkg history [-Hl] [-t [time|time-time],...] [-o column,...] [-n number]
+     /usr/bin/pkg history [-Hl] [-t [time|time-time],...] [-o column,...]
+         [-n number]
      /usr/bin/pkg purge-history
 
      /usr/bin/pkg rebuild-index
@@ -86,14 +83,21 @@
      /usr/bin/pkg version
      /usr/bin/pkg help
 
+     /usr/bin/pkg image-create [-FPUfz] [--force]
+         [--full|--partial|--user] [--zone] [-k ssl_key] [-c ssl_cert]
+         [--no-refresh] [--variant <variant_spec>=<instance> ...]
+         [-g path_or_uri|--origin=path_or_uri ...]
+         [-m uri|--mirror=uri ...] [--facet <facet_spec>=[True|False] ...]
+         (-p|--publisher) [<name>=]<repo_uri> dir
+
 DESCRIPTION
      pkg is the retrieval client for the image packaging system.  With
      a valid configuration, pkg can be invoked to create locations for
      packages to be installed, called 'images', and install packages
      into those images.  Packages are published by publishers, who may
-     make their packages available at one or more repositories.  pkg,
-     then, retrieves packages from a publisher's repository and
-     installs them into an image.
+     make their packages available at one or more repositories, or in
+     package archives.  pkg, then, retrieves packages from a publisher's
+     repository or from package archives and installs them into an image.
 
      A publisher is a forward domain name that can be used to identify a
      person, group of persons, or an organization as the source of one or
@@ -110,9 +114,9 @@
      "http://example.org/repository".
 
      pkg can also uninstall packages, refresh publisher metadata (such as
-     catalogs), validate package installation in an image, and query the
-     image for various tokens.  These queries can also be made of pkg(5)
-     repositories.
+     the list of available packages), validate package installation in an
+     image, and query the image for various tokens.  These queries can also
+     be made of pkg(5) repositories.
 
      Images can be of three types: full images, capable of providing a
      complete system; partial images, which are linked to a full image
@@ -136,58 +140,20 @@
 SUBCOMMANDS
      The following subcommands are supported:
 
-     image-create [-FPUfz] [--force] [--full|--partial|--user] [--zone]
-       [-k ssl_key] [-c ssl_cert] [--no-refresh]
-       [--variant <variant_spec>=<instance> ...]
-       [-g uri|--origin=uri ...] [-m uri|--mirror=uri ...]
-       [--facet <facet_spec>=[True|False] ...]
-       (-p|--publisher) [<name>=]<repo_uri> dir
-          Create, at location given by dir, an image suitable for package
-          operations.  The default image type is user, as given by the -U
-          (--user) option.  The image type may be set to a full image (-F
-          or --full) or to a partial image (-P or --partial) linked to the
-          full image enclosing the given dir path.  Additional origins can
-          be specified using -g or --origin, while additional mirrors can
-          be specified using -m or --mirror.
+     refresh [--full] [publisher ...]
 
-          A package repository URI must be provided using the -p or
-          --publisher option.  If a publisher name is also provided, then
-          only that publisher will be added when the image is created.  If
-          a publisher name is not provided, then all publishers known by the
-          specified repository will be added to the image.  An attempt to
-          retrieve the catalog associated with this publisher will be made
-          following the initial creation operations.
-
-          For publishers using client SSL authentication, a client key and
-          client certificate may be registered via the -c and -k options,
-          and will be used for all publishers added during image creation.
+          Updates the client's list of available packages and publisher
+          metadata for each publisher specified.  If no publishers are
+          specified, the operation will be performed for all publishers.
 
-          If the image is to be run within nonglobal zone context, then
-          the -z (--zone) option can be used to set an appropriate filter.
-
-          With -f (--force), force the creation of an image over an existing
-          image.  This option should be used with care; any existing image
-          metadata found will be lost (such as cached packages) except for
-          SSL certificates and keys used by the client.
-
-          With --no-refresh, do not attempt to contact the repositories for
-          the image's publishers to retrieve publisher metadata (e.g.
-          catalogs).
+          With --full, force a full retrieval of all publisher metadata,
+          instead of attempting an incremental update, and request that
+          any proxies used during the operation ignore cached data.  This
+          option only exists for troubleshooting purposes and should not
+          be used on a regular basis.
 
-          With --variant, set the specified variant to the indicated value.
-
-          With --facet, set the specified facet to the indicated value.
-
-     refresh [--full] [publisher ...]
-          Retrieve updates to the metadata (e.g. catalogs) for each publisher
-          specified.  When given no arguments, retrieves updates for each
-          publisher registered within the image.
-
-          With --full, retrieve all publisher metadata instead of attempting an
-          incremental update.
-
-     install [-nvq] [--accept] [--licenses] [--no-index] [--no-refresh]
-       [--deny-new-be | --require-new-be] [--be-name]
+     install [-nvq] [-g path_or_uri ...] [--accept] [--licenses] [--no-index]
+       [--no-refresh] [--deny-new-be | --require-new-be] [--be-name name]
        [--reject pkg_fmri_pattern ...] pkg_fmri_pattern ...
 
           Installs and updates packages to the newest version that match
@@ -199,13 +165,19 @@
           determines which files to preserve, and how they are preserved
           during package operations, see "File Actions" in pkg(5).
 
-          With the -n option, execute the requested operation but make
-          no persistent changes to the image.
+          With -g, use the specified package repository or archive as an
+          additional source of package data for the operation.  This option
+          may be specified multiple times.
+
+          With -n, perform a trial run of the operation with no package
+          changes made.
 
-          With the -v option, issue verbose progress messages during
-          the requested operation.  This option may be repeated for
-          increased effect.  With the -q option, issue no progress
-          messages during the requested operation.
+          With -q, hide progress messages during the requested operation.
+
+          With -v, issue verbose progress messages during the requested
+          operation and display detailed planning information.  This
+          option may be specified multiple times to increase the amount
+          of planning information displayed.
 
           With --accept, you indicate that you agree to and accept the
           terms of the licenses of the packages that are updated or
@@ -220,9 +192,9 @@
           With --no-index, do not update the search indices after the
           operation has completed successfully.
 
-          With --no-refresh, do not attempt to contact the
-          repositories for the image's publishers to retrieve
-          publisher metadata (e.g. catalogs).
+          With --no-refresh, do not attempt to contact the repositories
+          for the image's publishers to retrieve the newest list of
+          available packages and other metadata.
 
           With --be-name, rename the newly created boot environment to
           be the argument given.  This option is only valid if a new
@@ -254,8 +226,8 @@
           For all other options, refer to the install command above
           for usage and their effects.
 
-     update [-fnvq] [--accept] [--be-name name] [--licenses]
-       [--no-index] [--no-refresh] [--deny-new-be | --require-new-be]
+     update [-fnvq] [-g path_or_uri ...] [--accept] [--licenses] [--no-index]
+       [--no-refresh] [--deny-new-be | --require-new-be] [--be-name name]
        [--reject pkg_fmri_pattern ... ] [pkg_fmri_pattern ...]
 
           With no arguments, or if '*' is one of the patterns provided,
@@ -287,181 +259,9 @@
           For all other options, refer to the install command above for
           usage and their effects.
 
-     revert [-nv] [--be-name name] [--deny-new-be | --require-new-be]
-         (--tagged tag-name ... | path-to-file ...)
-          Revert files to their as-delivered condition.  Either all
-          files tagged with a particular value, or individual files 
-          may be reverted. File ownership and protections are also 
-          restored. Caution: reverting some editable files to their 
-          default values may make the system unbootable, or cause
-	  other malfunctions.
-
-          For all other options, refer to the install command above for
-          usage and their effects.
-
-     info [-lr] [--license] [pkg_fmri_pattern ...]
-          Display information about packages in a human-readable form.
-          Multiple FMRI patterns may be specified; with no patterns,
-          display information on all installed packages in the image.
-
-          With -l, use the data available from locally installed packages.
-          This is the default.
-
-          With -r, retrieve the data from the repositories of the image's
-          configured publishers.  Note that you must specify one or more
-          package patterns in this case.
-
-          With --license, print out the license text(s) for the package.
-          This may be combined with -l or -r.
-
-     contents [-Hmr] [-a attribute=pattern ...] [-o attribute ...]
-       [-s sort_key] [-t action_type ...] [pkg_fmri_pattern ...]
-          Display the contents (action attributes) of packages in the
-          current image.  By default, only the path attribute is displayed,
-          but the attribute set may be determined with the -o option.  The
-          -o option may be specified multiple times, or multiple attributes
-          may be specified as the argument to one -o option by separating
-          the attribute names with commas.  Only actions which have the
-          requested attributes will be displayed.  The -m option may
-          also be used, as a shorthand for '-Ho action.raw'.
-
-          The -a option allows you to limit the output to those actions
-          which have an attribute named in the option argument the value of
-          which matches the (glob) pattern in the option argument
-          (following the attribute name with an equals sign).  If multiple
-          -a options are given, then actions matching any of them will be
-          displayed.
-
-          The -s option specifies the attribute by which the listing should
-          be sorted.
-
-          The -t option limits the action types which will be displayed.
-
-          The -H option causes the headers to be omitted.
-
-          The -r option retrieves the requested data from the repositories
-          of the image's configured publishers.  This option is intended
-          to be used when the named packages are not already installed.
-
-          With no arguments, the output includes all installed packages.
-          Alternatively, multiple FMRI patterns may be specified, which
-          restricts the display to the contents of the matching packages.
-          When using -r, one or more pkg_fmri_patterns must be specified.
-
-          Several special "pseudo" attribute names are available for
-          convenience:
-
-          action.hash           Corresponds to the value of the action's
-                                hash, if the action carries a payload.
-
-          action.key            Corresponds to the value of the action's
-                                key attribute.  For example, for a file
-                                action, this is the path to the file.
-
-          action.name           Corresponds to the name of the action.
-                                For example, for a file action, this is
-                                "file"
-
-          action.raw            Corresponds to the complete contents of
-                                the action as represented in the package
-                                manifest.  This corresponds to the
-                                lines of output of 'pkg contents -m'
-
-          pkg.fmri              Corresponds to the full form FMRI of the
-                                package containing the action, such as
-                                pkg://extra/[email protected],5.11-0.101:20090702T175410Z
-
-          pkg.name              Corresponds to the name of the package
-                                containing the action, such as "SUNWcs"
+     list [-Hafnsuv] [-g path_or_uri ...] [--no-refresh]
+       [pkg_fmri_pattern ...]
 
-          pkg.publisher         Corresponds to the publisher of the
-                                the package containing the action, such
-                                as "opensolaris.org"
-
-          pkg.shortfmri         Corresponds to the short form FMRI of the
-                                package containing the action, such as
-                                pkg://opensolaris.org/[email protected]
-
-          The contents and search subcommands are related: both are used to
-          query the system for the contents of packages.  The contents
-          subcommand displays actions in one or more packages, filtering
-          the output based on the options chosen by the user.  The search
-          subcommand approaches the query from the other direction, looking
-          for packages which contain a user-supplied token.
-
-          Each subcommand is capable of formulating some queries of which
-          the other is capable.  Care should be taken in choosing the
-          subcommand, as a given query may be more naturally formulated in
-          one than in the other.
-
-     search [-HIaflpr] [-o attribute ...] [-s repo_uri] query
-          Search for matches to the query, and display the results.
-          Which tokens are indexed are action-dependent, but may
-          include content hashes and pathnames.  By default, queries are
-          interpreted as a series of terms to be matched exactly.  The
-          '?' and '*' characters can be used as glob(3C)-style
-          wildcards, allowing more flexible query matches.
-
-          With -H, omit the headers.
-
-          With -I, use a case-sensitive search.
-
-          By default, and with -a, perform the search and display information
-          about the matching actions.
-
-          By default, search prunes results from packages older than the
-          currently installed version and from package versions excluded by
-          current incorporations.  Use -f to show all results, regardless of
-          package version.
-
-          With -l, search the image's installed packages.
-
-          With -o, the columns of the results may be controlled.  The
-          -o option may be specified multiple times, or multiple attributes
-          may be specified as the argument to one -o option by separating
-          the attribute names with commas.  In addition to the "pseudo"
-          attributes outlined above, more are defined for search results:
-
-          search.match          Corresponds to the string which matched the
-                                search query.
-
-          search.match_type     Corresponds to the attribute which contained
-                                the string that matched the search query.
-
-          With -p, display packages which have some actions that match each
-          query term.  Using this option is equivalent to putting '<>' around
-          each term  in the query.  (For a description of the '<>' operator,
-          please see below.)
-
-          By default, and with -r, search the repositories corresponding
-          to the image's publishers.
-
-          With -s, search the pkg(5) repository located at the given URI.
-          This may be specified multiple times.
-
-          Both -l and -r (or -s) may be specified together, in which case both
-          local and remote searches will be performed.
-
-          In addition to simple token matching and wildcard search, a more
-          complicated query language is supported.  Phrases may be searched for
-          by using ' or ".  Note: Please make sure to take your shell into
-          account so that pkg actually sees the ' or ".
-
-          Boolean search using AND and OR is supported.  Field, or structured,
-          queries are supported.  The syntax for these is
-          pkg_name:action_type:key:token.  Missing fields are implicitly
-          wildcarded.  A search for :basename:pkg would match all actions
-          types in all packages with a key of basename and which matched
-          the token 'pkg'.  Explicit wildcards are supported in the pkg_name
-          and token fields, action_type and key must match exactly.
-
-          To convert actions to the packages which contain those actions,
-          use '<>'.  With the -a option, Searching for 'token' results in
-          information about the actions matching token, while searching for
-          '<token>' results in a list of packages containing actions which
-          matched token.
-
-     list [-Hafnsuv] [--no-refresh] [pkg_fmri_pattern ...]
           Display a list of packages in the current image, including
           state and other information.  By default, package variants
           for a different architecture or zone type are excluded.
@@ -516,24 +316,214 @@
           variants regardless of incorporation constraints or installed
           state.
 
+          With -g, use the specified package repository or archive as the
+          source of package data for the operation.  This option may be
+          specified multiple times.  Use of -g implies -a if -n is not
+          specified.
+
           With -n, display the newest versions of all known packages,
           regardless of installed state.
 
           With -s, display a one-line short-form giving the package name
-          and description.  This option may be used with -a, -n, -u or
-          -v.
+          and summary.  This option may be used with -a, -n, -u, or -v.
 
-          With -u, list only packages with newer versions available.
+          With -u, list only packages with newer versions available.  This
+          option may not be used with -g.
 
           With -v, show full package FMRIs, including publisher and
           complete version, all in the first column (the VERSION column
           disappears).  This option may be used with -a, -n, or -u.
 
           With --no-refresh, do not attempt to contact the repositories
-          for the image's publishers to retrieve publisher metadata (e.g.
-          catalogs).
+          for the image's publishers to retrieve the newest list of
+          available packages.
+
+     info [-lr] [-g path_or_uri ...] [--license] [pkg_fmri_pattern ...]
+
+          Display information about packages in a human-readable form.
+          Multiple FMRI patterns may be specified; with no patterns,
+          display information on all installed packages in the image.
+
+          With -g, use the specified package repository or archive as the
+          source of package data for the operation.  This option may be
+          specified multiple times.  Use of -g implies -r.
+
+          With -l, only display information for installed packages.  This
+          is the default.
+
+          With -r, match packages based on the newest available versions,
+          retrieving information for packages not currently installed (if
+          necessary) from the repositories of the image's configured
+          publishers.  At least one package must be specified when using
+          this option.  Without -r, only installed packages are displayed
+          by default.
+
+          With --license, display the license texts for the packages.
+          This may be combined with -l or -r.
+
+     contents [-Hmr] [-a attribute=pattern ...] [-g path_or_uri ...]
+       [-o attribute ...] [-s sort_key] [-t action_type ...]
+       [pkg_fmri_pattern ...]
+
+          Display the contents (action attributes) of packages in the
+          current image.  By default, only the path attribute is displayed,
+          but the attribute set may be determined with the -o option.  The
+          -o option may be specified multiple times, or multiple attributes
+          may be specified as the argument to one -o option by separating
+          the attribute names with commas.  Only actions which have the
+          requested attributes will be displayed.  The -m option may
+          also be used, as a shorthand for '-Ho action.raw'.
+
+          With -a, limit the output to those actions which have an attribute
+          named in the option argument with a value that matches the (glob)
+          pattern in the option argument (following the attribute name with
+          an equals sign).  If multiple -a options are given, then actions
+          matching any of them will be displayed.
+
+          With -g, use the specified package repository or archive as the
+          source of package data for the operation.  This option may be
+          specified multiple times.  Use of -g implies -r.
+
+          With -r, match packages based on the newest available versions,
+          retrieving information for packages not currently installed (if
+          necessary) from the repositories of the image's configured
+          publishers.  At least one package must be specified when using
+          this option.  Without -r, only installed packages are displayed
+          by default.
+
+          With -s, sort actions by the specified action attribute.  If not
+          provided, the default is to sort by path.  This option may be
+          specified multiple times.
+
+          With -t, only list actions of the type specified.  Multiple types
+          may be specified in a comma-separated list.  This option may be
+          specified multiple times.
+
+          With -H, omit the headers from the listing.
+
+          With no arguments, the output includes all installed packages.
+          Alternatively, multiple FMRI patterns may be specified, which
+          restricts the display to the contents of the matching packages.
+          When using -r, one or more pkg_fmri_patterns must be specified.
+
+          Several special "pseudo" attribute names are available for
+          convenience:
+
+          action.hash           Corresponds to the value of the action's
+                                hash, if the action carries a payload.
+
+          action.key            Corresponds to the value of the action's
+                                key attribute.  For example, for a file
+                                action, this is the path to the file.
+
+          action.name           Corresponds to the name of the action.
+                                For example, for a file action, this is
+                                "file".
+
+          action.raw            Corresponds to the complete contents of
+                                the action as represented in the package
+                                manifest.  This corresponds to the
+                                lines of output of 'pkg contents -m'.
+
+          pkg.fmri              Corresponds to the full form FMRI of the
+                                package containing the action, such as
+                                pkg://extra/[email protected],5.11-0.101:
+                                20090702T175410Z.
+
+          pkg.name              Corresponds to the name of the package
+                                containing the action, such as "SUNWcs".
+
+          pkg.publisher         Corresponds to the publisher of the
+                                package containing the action, such
+                                as "opensolaris.org".
+
+          pkg.shortfmri         Corresponds to the short form FMRI of the
+                                package containing the action, such as
+                                pkg://opensolaris.org/[email protected].
+
+          The contents and search subcommands are related: both are used to
+          query the system for the contents of packages.  The contents
+          subcommand displays actions in one or more packages, filtering
+          the output based on the options chosen by the user.  The search
+          subcommand approaches the query from the other direction, looking
+          for packages which contain a user-supplied token.
+
+          Each subcommand is capable of formulating some queries of which
+          the other is capable.  Care should be taken in choosing the
+          subcommand, as a given query may be more naturally formulated in
+          one than in the other.
+
+     search [-HIaflpr] [-o attribute ...] [-s repo_uri] query
+
+          Search for matches to the query, and display the results.
+          The tokens that are indexed are action-dependent, but may
+          include content hashes and pathnames.  By default, queries are
+          interpreted as a series of terms to be matched exactly.  The
+          '?' and '*' characters can be used as glob(3C)-style
+          wildcards, allowing more flexible query matches.
+
+          With -H, omit the headers.
+
+          With -I, use a case-sensitive search.
+
+          By default, and with -a, perform the search and display information
+          about the matching actions.
+
+          By default, search prunes results from packages older than the
+          currently installed version and from package versions excluded by
+          current incorporations.  Use -f to show all results, regardless of
+          package version.
+
+          With -l, search the image's installed packages.
+
+          With -o, the columns of the results may be controlled.  The
+          -o option may be specified multiple times, or multiple attributes
+          may be specified as the argument to one -o option by separating
+          the attribute names with commas.  In addition to the "pseudo"
+          attributes outlined above, more are defined for search results:
+
+          search.match          Corresponds to the string which matched the
+                                search query.
+
+          search.match_type     Corresponds to the attribute which contained
+                                the string that matched the search query.
+
+          With -p, display packages which have some actions that match each
+          query term.  Using this option is equivalent to putting '<>' around
+          each term in the query.  (For a description of the '<>' operator,
+          please see below.)
+
+          By default, and with -r, search the repositories corresponding
+          to the image's publishers.
+
+          With -s, search the pkg(5) repository located at the given URI.
+          This may be specified multiple times.  Package archives are not
+          supported.
+
+          Both -l and -r (or -s) may be specified together, in which case both
+          local and remote searches will be performed.
+
+          In addition to simple token matching and wildcard search, a more
+          complicated query language is supported.  Phrases may be searched for
+          by using ' or ".  Note: quote or escape the ' or " characters
+          so that they are passed to pkg rather than consumed by your shell.
+
+          Boolean search using AND and OR is supported.  Field, or structured,
+          queries are supported.  The syntax for these is
+          pkg_name:action_type:key:token.  Missing fields are implicitly
+          wildcarded.  A search for :basename:pkg would match all action
+          types in all packages with a key of basename and which matched
+          the token 'pkg'.  Explicit wildcards are supported in the pkg_name
+          and token fields; action_type and key must match exactly.
+
+          To convert actions to the packages which contain those actions,
+          use '<>'.  With the -a option, searching for 'token' results in
+          information about the actions matching token, while searching for
+          '<token>' results in a list of packages containing actions which
+          matched token.
 
      verify [-Hqv] [pkg_fmri_pattern ...]
+
           Validate the installation of packages in the current image.
           Please note that verification of installed package content is
           based on a custom content analysis that may return different
@@ -546,16 +536,50 @@
 
           With -v, include informational messages regarding packages.
 
+     fix [--accept] [--licenses] [pkg_fmri_pattern ...]
+
+          Fix any errors reported by pkg verify.  Please note that
+          verification of installed package content is based on a
+          custom content analysis that may return different results
+          than those of other programs.
+
+          With --accept, you indicate that you agree to and accept the
+          terms of the licenses of the packages that are updated or
+          installed.  If you do not provide this option, and any package
+          licenses require acceptance, the operation will fail.
+
+          With --licenses, display all of the licenses for the packages that
+          will be installed or updated as part of this operation.
+
+     revert [-nv] [--be-name name] [--deny-new-be | --require-new-be]
+          (--tagged tag-name ... | path-to-file ...)
+          Revert files to their as-delivered condition.  Either all
+          files tagged with a particular value or individual files
+          may be reverted.  File ownership and protections are also
+          restored.  Caution: reverting some editable files to their
+          default values may make the system unbootable or cause
+          other malfunctions.
+
+          For all other options, refer to the install command above for
+          usage and their effects.
+
      variant [-H] [<variant_spec> ...]
+
           Display the current values of all variants, or with arguments,
           only the variants specified
 
           With -H, omit the headers from the listing.
 
-     change-variant [-nvq] [--accept] [--be-name name] [--licenses]
+     change-variant [-nvq] [-g path_or_uri ...] [--accept] [--licenses]
+       [--deny-new-be | --require-new-be] [--be-name name]
        <variant_spec>=<instance> ...
+
           Change the specified variants in the current image.
 
+          With -g, use the specified package repository or archive as an
+          additional source of package data for the operation.  This option
+          may be specified multiple times.
+
           With the -n option, plan the requested operation but make
           no actual changes.
 
@@ -582,17 +606,23 @@
           required.
 
      facet [-H] [<facet_spec> ...]
+
           Without arguments, displays the current values of all facets.  With
           argument(s), evaluate if each facet would be true or false and print
           the result.
 
           With -H, omit the headers from the listing.
 
-     change-facet [-nvq] [--accept] [--be-name name] [--licenses]
+     change-facet [-nvq] [-g path_or_uri ...] [--accept] [--licenses]
+       [--deny-new-be | --require-new-be] [--be-name name]
        <facet_spec>=[True|False|None] ...
 
           Change the specified facets in the current image.
 
+          With -g, use the specified package repository or archive as an
+          additional source of package data for the operation.  This option
+          may be specified multiple times.
+
           With the -n option, plan the requested operation but make
           no actual changes.
 
@@ -621,40 +651,8 @@
           Facets may be set to True or False.  Setting one to None removes
           that facet specification from the current image.
 
-     fix [--accept] [--licenses] [pkg_fmri_pattern ...]
-          Fix any errors reported by pkg verify.  Please note that
-          verification of installed package content is based on a
-          custom content analysis that may return different results
-          than those of other programs.
-
-          With --accept, you indicate that you agree to and accept the
-          terms of the licenses of the packages that are updated or
-          installed.  If you do not provide this option, and any package
-          licenses require acceptance, the operation will fail.
-
-          With --licenses, display all of the licenses for the packages that
-          will be installed or updated as part of this operation.
+     property [-H] [propname ...]
 
-     set-property propname propvalue
-          Update an existing image property or add a new image property;
-          except for preferred-publisher, which can only be changed using
-          set-publisher.
-
-     add-property-value propname propvalue
-          Add a value to an existing image property or add a new image property;
-          except for preferred-publisher, which can only be changed using
-          set-publisher.
-
-     remove-property-value propname propvalue
-          Remove a value from an existing image property; except for
-          preferred-publisher, which can only be changed using set-publisher.
-
-     unset-property propname ...
-          Remove an existing image property or properties; except for
-          preferred-publisher, which can only be changed using
-          set-publisher.
-
-     property [-H] [propname ...]
           Display image property information.  With no argument, display the
           names and values for all image properties.  If a specific list of
           property names is requested, display the names and values for those
@@ -662,6 +660,43 @@
 
           With -H, omit the headers from the listing.
 
+     set-property propname propvalue
+
+          Update an existing image property or add a new image property,
+          except for preferred-publisher, which can only be changed using
+          set-publisher.
+
+     add-property-value propname propvalue
+
+          Add a value to an existing image property or add a new image
+          property, except for preferred-publisher, which can only be
+          changed using set-publisher.
+
+     remove-property-value propname propvalue
+
+          Remove a value from an existing image property, except for
+          preferred-publisher, which can only be changed using set-publisher.
+
+     unset-property propname ...
+
+          Remove an existing image property or properties, except for
+          preferred-publisher, which can only be changed using
+          set-publisher.
+
+     publisher [-HPn] [publisher ...]
+
+          Display publisher information.  With no arguments, display
+          the list of all publishers, their origin URIs, and mirrors
+          in order of search preference.  If specific publishers are
+          requested, display the configuration values, including
+          mirrors, associated with those publishers.
+
+          With -H, omit the headers from the listing.
+
+          With -P, display only the preferred publisher.
+
+          With -n, display only enabled publishers.
+
      set-publisher [-Ped] [-k ssl_key] [-c ssl_cert]
        [-g origin_to_add|--add-origin=origin_to_add ...]
        [-G origin_to_remove|--remove-origin=origin_to_remove ...]
@@ -732,14 +767,16 @@
 
           With -c and -k, specify client SSL certificate and key respectively.
 
-          With -g (--add-origin), add the URI as an origin for the given
-          publisher.  This should be the location of a package repository.
+          With -g (--add-origin), add the specified URI or path as an origin
+          for the given publisher.  This should be the location of a package
+          repository or archive.
 
-          With -G (--remove-origin), remove the URI from the list of origins
-          for the given publisher.
+          With -G (--remove-origin), remove the URI or path from the list of
+          origins for the given publisher.
 
-          With --no-refresh, do not attempt to contact the publisher
-          specified to retrieve its metadata (e.g. catalog).
+          With --no-refresh, do not attempt to contact the repositories
+          for the image's publishers to retrieve the newest list of
+          available packages and other metadata.
 
           With --reset-uuid, choose a new unique identifier that identifies
           this image to its publisher.
@@ -766,22 +803,10 @@
           disabled.
 
      unset-publisher publisher ...
+
           Remove the configuration associated with the given publisher
           or publishers.
 
-     publisher [-HPn] [publisher ...]
-          Display publisher information.  With no arguments, display
-          the list of all publishers, their origin URIs, and mirrors
-          in order of search preference.  If specific publishers are
-          requested, display the configuration values, including
-          mirrors, associated with those publishers.
-
-          With -H, omit the headers from the listing.
-
-          With -P, display only the preferred publisher.
-
-          With -n, display only enabled publishers.
-
      history [-Hl] [-t [time|time-time],...] [-n number] [-o column,...]
 
           Display the command history of the applicable image.
@@ -845,22 +870,67 @@
           are those recorded at the time of the pkg operation.
 
      purge-history
+
           Deletes all existing history information.
 
      rebuild-index
+
           Rebuilds the index used by 'pkg search'.  This is a recovery operation
           not intended for general use.
 
      update-format
+
           Updates the format of the image to the current version.  Once
           this operation has completed, the image may no longer be used
           with older versions of the pkg(5) system.
 
      version
+
           Display a unique string identifying the version of pkg(1).  This
           string is not guaranteed to be comparable in any fashion between
           versions.
 
+     image-create [-FPUfz] [--force] [--full|--partial|--user] [--zone]
+       [-k ssl_key] [-c ssl_cert] [--no-refresh]
+       [--variant <variant_spec>=<instance> ...]
+       [-g path_or_uri|--origin=path_or_uri ...] [-m uri|--mirror=uri ...]
+       [--facet <facet_spec>=[True|False] ...]
+       (-p|--publisher) [<name>=]<repo_uri> dir
+
+          Create, at location given by dir, an image suitable for package
+          operations.  The default image type is user, as given by the -U
+          (--user) option.  The image type may be set to a full image (-F
+          or --full) or to a partial image (-P or --partial) linked to the
+          full image enclosing the given dir path.  Additional origins can
+          be specified using -g or --origin, while additional mirrors can
+          be specified using -m or --mirror.
+
+          A package repository URI must be provided using the -p or
+          --publisher option.  If a publisher name is also provided, then
+          only that publisher will be added when the image is created.  If
+          a publisher name is not provided, then all publishers known by the
+          specified repository will be added to the image.  An attempt to
+          retrieve the catalog associated with this publisher will be made
+          following the initial creation operations.
+
+          For publishers using client SSL authentication, a client key and
+          client certificate may be registered via the -c and -k options,
+          and will be used for all publishers added during image creation.
+
+          If the image is to be run within a non-global zone context, then
+          the -z (--zone) option can be used to set an appropriate variant.
+
+          With -f (--force), force the creation of an image over an existing
+          image.  This option should be used with care.
+
+          With --no-refresh, do not attempt to contact the repositories
+          for the image's publishers to retrieve the newest list of
+          available packages and other metadata.
+
+          With --variant, set the specified variant to the indicated value.
+
+          With --facet, set the specified facet to the indicated value.
+
 IMAGE PROPERTIES
      The following properties are part of the image and may be set using
      the set-property subcommand.  The values of these properties are
@@ -912,7 +982,7 @@
      signature-policy
           (string)  Determine what checks will be performed on manifests
           when installing a package into this image.  The final policy
-          applied to a  package depends on the combination of image policy
+          applied to a package depends on the combination of image policy
           and publisher policy.  The combination will be at least as strict
           as the stricter of the two policies taken individually.  The
           following are the valid values for this property.
@@ -1008,7 +1078,7 @@
      limiting the display to just the package name and path attributes of
      actions whose "path" attribute ends in ".desktop" or ".png".
 
-     $ pkg contents contents -o pkg.name,path -a path=\*.desktop \
+     $ pkg contents -o pkg.name,path -a path=\*.desktop \
          -a path=\*.png SUNWfirefox SUNWthunderbird
      PKG.NAME        PATH
      SUNWfirefox     usr/lib/firefox/chrome/icons/default/default16.png
@@ -1124,6 +1194,24 @@
 
      $ pkg install --reject B C
 
+     Example 23:  List all versions of all packages in a package archive:
+
+     $ pkg list -g /my/archive.p5p -f
+
+     Example 24:  List all versions of all packages in a repository:
+
+     $ pkg list -g http://example.com:10000 -f
+
+     Example 25:  Display the package information for the latest version
+     of a package in a package archive, even if it is not currently
+     installed:
+
+     $ pkg info -g /my/archive.p5p pkg_name
+
+     Example 26:  Display the contents of a package in a package archive
+     that is not currently installed:
+
+     $ pkg contents -g /my/archive.p5p pkg_name
+
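+     Example 27:  Install a package, using a package archive as an
+     additional source of package data for the operation:
+
+     $ pkg install -g /my/archive.p5p pkg_name
+
+     Example 28:  Add a package archive as an additional origin for the
+     publisher "example.com":
+
+     $ pkg set-publisher -g /my/archive.p5p example.com
+
+     Example 29:  Create a new, full image, with the publisher
+     "example.com" and its repository located at http://example.com:10000,
+     at /my/image:
+
+     $ pkg image-create -F -p example.com=http://example.com:10000 \
+         /my/image
+
+     Example 30:  Search the image's installed packages for actions with
+     a key of basename that match the token 'pkg':
+
+     $ pkg search -l :basename:pkg
+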
 ENVIRONMENT VARIABLES
      PKG_IMAGE
           Specifies the directory containing the image to use for package
--- a/src/man/pkgrecv.1.txt	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/man/pkgrecv.1.txt	Wed Feb 09 18:43:21 2011 -0800
@@ -5,20 +5,29 @@
      pkgrecv - image packaging system content retrieval utility
 
 SYNOPSIS
-     /usr/bin/pkgrecv [-s src_uri] [-d (path|dest_uri)] [-c cache_dir]
+     /usr/bin/pkgrecv [-s src_uri] [-a] [-d (path|dest_uri)] [-c cache_dir]
          [-kr] [-m match] [-n] [--raw] [--key keyfile --cert certfile] 
          (fmri|pattern) ...
      /usr/bin/pkgrecv [-s src_uri] --newest
 
 DESCRIPTION
-     pkgrecv allows the user to retrieve the contents of a package
-     from a pkg(5) repository.  With the default options, the contents
-     are retrieved in a repository format suitable for use with
-     pkg.depotd(1M).
+     pkgrecv allows the user to retrieve packages from a pkg(5) repository
+     or package archive.  It can also optionally republish the retrieved
+     packages to a different package repository or archive them.  By
+     default, packages are retrieved in package repository format suitable
+     for use with pkg(1), pkg.depotd(1M), and package publication tools.
 
 OPTIONS
      The following options are supported:
 
+     -a              Store the retrieved package data in a pkg(5) archive
+                     at the location specified by -d.  The file must not
+                     already exist, and this option may only be used with
+                     filesystem-based destinations.  Although not required,
+                     it is strongly suggested that a file extension of
+                     '.p5p' be used (e.g. 'archive.p5p').  This option may
+                     not be combined with --raw.
+
      -c cache_dir    The path to a directory that will be used to cache
                      downloaded content.  If one is not supplied, the
                      client will automatically pick a cache directory.
@@ -30,7 +39,8 @@
                      republish packages to.  If not provided, the default
                      value is the current working directory.  The target
                      must already exist.  New repositories can be created
-                     using pkgrepo(1).
+                     using pkgrepo(1).  If -a is specified, the target is
+                     assumed to be a new package archive.
 
      -h              Display usage message.
 
@@ -68,7 +78,8 @@
                      based destinations.  This can be used with pkgsend(1)
                      include to conveniently modify and republish packages,
                      perhaps by correcting file contents or providing
-                     additional package metadata.
+                     additional package metadata.  This option may not be
+                     combined with -a.
 
 
 
@@ -124,16 +135,28 @@
 
      $ pkgrecv -s http://example.com:10000 -d /my/pkg/repo '*'
 
+     Example 9: Create a package archive containing the package "SUNWemacs"
+     and all of its dependencies from the repository located at
+     http://example.com:10000:
+
+     $ pkgrecv -s http://example.com:10000 -d /my/emacs.p5p -a -r SUNWemacs
+
+     Example 10: Copy all of the packages in a package archive to an
+     existing repository located at '/export/repo':
+
+     $ pkgrecv -s /my/archive.p5p -d /export/repo '*'
+
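+     Example 11: Copy the package "SUNWemacs", supplying the source and
+     destination through the PKG_SRC and PKG_DEST environment variables
+     (described below) instead of the -s and -d options; a POSIX shell
+     is assumed:
+
+     $ export PKG_SRC=http://example.com:10000
+     $ export PKG_DEST=/my/pkg/repo
+     $ pkgrecv SUNWemacs
+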
 ENVIRONMENT VARIABLES
      The following environment variables are supported:
 
      PKG_DEST           The path of a directory to save the retrieved
-                        package to, or the URI of a repository to
-                        republish it to.  If not provided, the default
-                        value is the current working directory.
+                        package to, or the filesystem path or URI of a
+                        repository to republish it to.  If not provided,
+                        the default value is the current working directory.
 
-     PKG_SRC            A URI representing the location of a pkg(5)
-                        repository to retrieve package data from.
+     PKG_SRC            A URI or filesystem path representing the location
+                        of a pkg(5) repository or package archive to
+                        retrieve package data from.
 
 EXIT STATUS
      The following exit values are returned:
--- a/src/man/pkgsend.1.txt	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/man/pkgsend.1.txt	Wed Feb 09 18:43:21 2011 -0800
@@ -23,7 +23,8 @@
      structured as a transaction; multiple invocations of pkgsend
      through a transaction allow the addition of resources and the
      transaction submission.  To create or manage repositories, see
-     pkgrepo(1).
+     pkgrepo(1).  To create package archives from packages in an
+     existing repository, see pkgrecv(1).
 
 OPTIONS
      The following options are supported:
--- a/src/modules/actions/license.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/actions/license.py	Wed Feb 09 18:43:21 2011 -0800
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 """module describing a license packaging object
@@ -171,7 +171,8 @@
 
                 try:
                         pub = img.get_publisher(pfmri.publisher)
-                        return img.transport.get_content(pub, self.hash)
+                        return img.transport.get_content(pub, self.hash,
+                            fmri=pfmri)
                 finally:
                         img.cleanup_downloads()
 
--- a/src/modules/client/api.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/client/api.py	Wed Feb 09 18:43:21 2011 -0800
@@ -36,11 +36,13 @@
 
 import collections
 import copy
+import datetime
 import errno
 import fnmatch
 import os
 import shutil
 import sys
+import tempfile
 import threading
 import urllib
 
@@ -66,7 +68,7 @@
 from pkg.client.imageplan import EXECUTED_OK
 from pkg.client import global_settings
 
-CURRENT_API_VERSION = 52
+CURRENT_API_VERSION = 53
 CURRENT_P5I_VERSION = 1
 
 # Image type constants.
@@ -77,6 +79,109 @@
 
 logger = global_settings.logger
 
+class _LockedGenerator(object):
+        """This is a private class and should not be used by API consumers.
+
+        This decorator class wraps API generator functions, managing the
+        activity and cancelation locks.  Due to implementation differences
+        in the decorator protocol, the decorator must be used with
+        parentheses in order for this to function correctly.  Always
+        decorate functions @_LockedGenerator()."""
+
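+        # A sketch of the intended use (gen_packages is a hypothetical
+        # method name, shown for illustration):
+        #
+        #       @_LockedGenerator()
+        #       def gen_packages(self, ...):
+        #               yield ...
+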
+        def __init__(self, *d_args, **d_kwargs):
+                object.__init__(self)
+
+        def __call__(self, f):
+                def wrapper(*fargs, **f_kwargs):
+                        instance, fargs = fargs[0], fargs[1:]
+                        instance._acquire_activity_lock()
+                        instance._enable_cancel()
+
+                        clean_exit = True
+                        canceled = False
+                        try:
+                                for v in f(instance, *fargs, **f_kwargs):
+                                        yield v
+                        except GeneratorExit:
+                                return
+                        except apx.CanceledException:
+                                canceled = True
+                                raise
+                        except Exception:
+                                clean_exit = False
+                                raise
+                        finally:
+                                if canceled:
+                                        instance._cancel_done()
+                                elif clean_exit:
+                                        try:
+                                                instance._disable_cancel()
+                                        except apx.CanceledException:
+                                                instance._cancel_done()
+                                                instance._activity_lock.release()
+                                                raise
+                                else:
+                                        instance._cancel_cleanup_exception()
+                                instance._activity_lock.release()
+
+                return wrapper
+
+
+class _LockedCancelable(object):
+        """This is a private class and should not be used by API consumers.
+
+        This decorator class wraps non-generator cancelable API functions,
+        managing the activity and cancelation locks.  Due to implementation
+        differences in the decorator protocol, the decorator must be used with
+        parentheses in order for this to function correctly.  Always
+        decorate functions @_LockedCancelable()."""
+
+        def __init__(self, *d_args, **d_kwargs):
+                object.__init__(self)
+
+        def __call__(self, f):
+                def wrapper(*fargs, **f_kwargs):
+                        instance, fargs = fargs[0], fargs[1:]
+                        instance._acquire_activity_lock()
+                        instance._enable_cancel()
+
+                        clean_exit = True
+                        canceled = False
+                        try:
+                                return f(instance, *fargs, **f_kwargs)
+                        except apx.CanceledException:
+                                canceled = True
+                                raise
+                        except Exception:
+                                clean_exit = False
+                                raise
+                        finally:
+                                instance._img.cleanup_downloads()
+                                try:
+                                        if int(os.environ.get("PKG_DUMP_STATS",
+                                            0)) > 0:
+                                                instance._img.transport.stats.dump()
+                                except ValueError:
+                                        # Don't generate stats if an invalid
+                                        # value is supplied.
+                                        pass
+
+                                if canceled:
+                                        instance._cancel_done()
+                                elif clean_exit:
+                                        try:
+                                                instance._disable_cancel()
+                                        except apx.CanceledException:
+                                                instance._cancel_done()
+                                                instance._activity_lock.release()
+                                                raise
+                                else:
+                                        instance._cancel_cleanup_exception()
+                                instance._activity_lock.release()
+
+                return wrapper
+
+
 class ImageInterface(object):
         """This class presents an interface to images that clients may use.
         There is a specific order of methods which must be used to install
@@ -158,18 +263,18 @@
 
                 if isinstance(img_path, basestring):
                         # Store this for reset().
-                        self.__img_path = img_path
-                        self.__img = image.Image(img_path,
+                        self._img_path = img_path
+                        self._img = image.Image(img_path,
                             progtrack=progresstracker,
                             user_provided_dir=exact_match)
 
                         # Store final image path.
-                        self.__img_path = self.__img.get_root()
+                        self._img_path = self._img.get_root()
                 elif isinstance(img_path, image.Image):
                         # This is a temporary, special case for client.py
                         # until the image api is complete.
-                        self.__img = img_path
-                        self.__img_path = img_path.get_root()
+                        self._img = img_path
+                        self._img_path = img_path.get_root()
                 else:
                         # API consumer passed an unknown type for img_path.
                         raise TypeError(_("Unknown img_path type."))
@@ -183,18 +288,32 @@
                 self.__be_name = None
                 self.__can_be_canceled = False
                 self.__canceling = False
-                self.__activity_lock = pkg.nrlock.NRLock()
+                self._activity_lock = pkg.nrlock.NRLock()
                 self.__blocking_locks = False
-                self.__img.blocking_locks = self.__blocking_locks
+                self._img.blocking_locks = self.__blocking_locks
                 self.__cancel_lock = pkg.nrlock.NRLock()
                 self.__cancel_cv = threading.Condition(self.__cancel_lock)
                 self.__new_be = None # create if needed
+                self.__alt_sources = {}
 
         def __set_blocking_locks(self, value):
-                self.__activity_lock.acquire()
+                self._activity_lock.acquire()
                 self.__blocking_locks = value
-                self.__img.blocking_locks = value
-                self.__activity_lock.release()
+                self._img.blocking_locks = value
+                self._activity_lock.release()
+
+        def __set_img_alt_sources(self, repos):
+                """Private helper function to change image to use alternate
+                package sources if applicable."""
+
+                # When using alternate package sources with the image, the
+                # result is a composite of the package data already known
+                # by the image and the alternate sources.
+                if repos:
+                        self._img.set_alt_pkg_sources(
+                            self.__get_alt_pkg_data(repos))
+                else:
+                        self._img.set_alt_pkg_sources(None)
 
         blocking_locks = property(lambda self: self.__blocking_locks,
             __set_blocking_locks, doc="A boolean value indicating whether "
@@ -207,37 +326,37 @@
         @property
         def excludes(self):
                 """The list of excludes for the image."""
-                return self.__img.list_excludes()
+                return self._img.list_excludes()
 
         @property
         def img(self):
                 """Private; public access to this property will be removed at
                 a later date.  Do not use."""
-                return self.__img
+                return self._img
 
         @property
         def img_type(self):
                 """Returns the IMG_TYPE constant for the image's type."""
-                if not self.__img:
+                if not self._img:
                         return None
-                return self.__img.image_type(self.__img.root)
+                return self._img.image_type(self._img.root)
 
         @property
         def is_zone(self):
                 """A boolean value indicating whether the image is a zone."""
-                return self.__img.is_zone()
+                return self._img.is_zone()
 
         @property
         def last_modified(self):
                 """A datetime object representing when the image's metadata was
                 last updated."""
 
-                return self.__img.get_last_modified()
+                return self._img.get_last_modified()
 
         def __set_progresstracker(self, value):
-                self.__activity_lock.acquire()
+                self._activity_lock.acquire()
                 self.__progresstracker = value
-                self.__activity_lock.release()
+                self._activity_lock.release()
 
         progresstracker = property(lambda self: self.__progresstracker,
             __set_progresstracker, doc="The current ProgressTracker object.  "
@@ -248,9 +367,9 @@
         def root(self):
                 """The absolute pathname of the filesystem root of the image.
                 This property is read-only."""
-                if not self.__img:
+                if not self._img:
                         return None
-                return self.__img.root
+                return self._img.root
 
         @staticmethod
         def check_be_name(be_name):
@@ -275,7 +394,7 @@
                 assert apx.ExpiringCertificate not in log_op_end
 
                 try:
-                        self.__img.check_cert_validity()
+                        self._img.check_cert_validity()
                 except apx.ExpiringCertificate, e:
                         logger.error(e)
                 except:
@@ -294,7 +413,7 @@
                 #
                 self.__cert_verify()
                 try:
-                        self.__img.refresh_publishers(immediate=True,
+                        self._img.refresh_publishers(immediate=True,
                             progtrack=self.__progresstracker)
                 except apx.ImageFormatUpdateNeeded:
                         # If image format update is needed to perform refresh,
@@ -306,12 +425,12 @@
                         # this isn't useful, but this is as good as it gets.)
                         logger.warning(_("Skipping publisher metadata refresh;"
                             "image rooted at %s must have its format updated "
-                            "before a refresh can occur.") % self.__img.root)
-
-        def __acquire_activity_lock(self):
+                            "before a refresh can occur.") % self._img.root)
+
+        def _acquire_activity_lock(self):
                 """Private helper method to aqcuire activity lock."""
 
-                rc = self.__activity_lock.acquire(
+                rc = self._activity_lock.acquire(
                     blocking=self.__blocking_locks)
                 if not rc:
                         raise apx.ImageLockedError()
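
Given the non-blocking behavior above, a hedged retry sketch (api_inst and
the package name are illustrative):

        import time
        import pkg.client.api_errors as apx

        for attempt in range(5):
                try:
                        api_inst.plan_install(["editor/vim"])
                        break
                except apx.ImageLockedError:
                        # Another client holds the image lock; retry shortly.
                        time.sleep(2)
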
@@ -322,42 +441,42 @@
                     Log the start of the operation.
                     Check be_name."""
 
-                self.__acquire_activity_lock()
+                self._acquire_activity_lock()
                 try:
-                        self.__enable_cancel()
+                        self._enable_cancel()
                         if self.__plan_type is not None:
                                 raise apx.PlanExistsException(
                                     self.__plan_type)
-                        self.__img.lock(allow_unprivileged=noexecute)
+                        self._img.lock(allow_unprivileged=noexecute)
                 except:
-                        self.__cancel_cleanup_exception()
-                        self.__activity_lock.release()
+                        self._cancel_cleanup_exception()
+                        self._activity_lock.release()
                         raise
 
-                assert self.__activity_lock._is_owned()
+                assert self._activity_lock._is_owned()
                 self.log_operation_start(operation)
                 self.__new_be = new_be
                 self.__be_name = be_name
                 if self.__be_name is not None:
                         self.check_be_name(be_name)
-                        if not self.__img.is_liveroot():
+                        if not self._img.is_liveroot():
                                 raise apx.BENameGivenOnDeadBE(self.__be_name)
 
         def __plan_common_finish(self):
                 """Finish planning an operation."""
 
-                assert self.__activity_lock._is_owned()
-                self.__img.cleanup_downloads()
-                self.__img.unlock()
+                assert self._activity_lock._is_owned()
+                self._img.cleanup_downloads()
+                self._img.unlock()
                 try:
                         if int(os.environ.get("PKG_DUMP_STATS", 0)) > 0:
-                                self.__img.transport.stats.dump()
+                                self._img.transport.stats.dump()
                 except ValueError:
                         # Don't generate stats if an invalid value
                         # is supplied.
                         pass
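                # The PKG_DUMP_STATS check above (repeated at the other
                # lock-release points below) dumps transport statistics for
                # any integer value greater than zero; non-integer values
                # are silently ignored via the ValueError handler.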
 
-                self.__activity_lock.release()
+                self._activity_lock.release()
 
         def __set_new_be(self):
                 """Figure out whether or not we'd create a new boot environment
@@ -365,11 +484,11 @@
                 can't have one."""
                 # decide whether or not to create new BE.
 
-                if self.__img.is_liveroot():
+                if self._img.is_liveroot():
                         if self.__new_be is None:
-                                self.__new_be = self.__img.imageplan.reboot_needed()
+                                self.__new_be = self._img.imageplan.reboot_needed()
                         elif self.__new_be is False and \
-                            self.__img.imageplan.reboot_needed():
+                            self._img.imageplan.reboot_needed():
                                 raise apx.ImageUpdateOnLiveImageException()
                 else:
                         self.__new_be = False
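
The tri-state handling above, restated as a hedged sketch (api_inst assumed;
see also the plan_install() docstring below):

        # new_be=None:  create a new boot environment only if the plan
        #               requires a reboot.
        # new_be=True:  always create one.
        # new_be=False: never create one; if the plan needs a reboot on a
        #               live image, ImageUpdateOnLiveImageException is raised.
        api_inst.plan_install(["editor/vim"], new_be=None)
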
@@ -385,7 +504,6 @@
                 log_op_end."""
 
                 if log_op_end == None:
-
                         log_op_end = []
 
                 # we always explicity handle apx.PlanCreationException
@@ -396,7 +514,7 @@
                 if exc_type == apx.PlanCreationException:
                         self.__set_history_PlanCreationException(exc_value)
                 elif exc_type == apx.CanceledException:
-                        self.__cancel_done()
+                        self._cancel_done()
                 elif exc_type == apx.ConflictingActionErrors:
                         self.log_operation_end(error=str(exc_value),
                             result=history.RESULT_CONFLICTING_ACTIONS)
@@ -406,11 +524,11 @@
                 if exc_type != apx.ImageLockedError:
                         # Must be called before reset_unlock, and only if
                         # the exception was not a locked error.
-                        self.__img.unlock()
+                        self._img.unlock()
 
                 try:
                         if int(os.environ.get("PKG_DUMP_STATS", 0)) > 0:
-                                self.__img.transport.stats.dump()
+                                self._img.transport.stats.dump()
                 except ValueError:
                         # Don't generate stats if an invalid value
                         # is supplied.
@@ -422,22 +540,24 @@
                 # information in the plan.  We have to save it here and restore
                 # it later because __reset_unlock() torches it.
                 if exc_type == apx.ConflictingActionErrors:
-                        plan_desc = PlanDescription(self.__img, self.__new_be)
+                        plan_desc = PlanDescription(self._img, self.__new_be)
 
                 self.__reset_unlock()
 
                 if exc_type == apx.ConflictingActionErrors:
                         self.__plan_desc = plan_desc
 
-                self.__activity_lock.release()
+                self._activity_lock.release()
                 raise
 
         def plan_install(self, pkg_list, refresh_catalogs=True,
             noexecute=False, update_index=True, be_name=None,
-            reject_list=misc.EmptyI, new_be=False):
+            reject_list=misc.EmptyI, new_be=False, repos=None):
                 """Constructs a plan to install the packages provided in
                 pkg_list.  Once an operation has been planned, it may be
                 executed by first calling prepare(), and then execute_plan().
+                After execution of a plan, or to abandon a plan, reset() should
+                be called.
 
                 'pkg_list' is a list of packages to install.
 
@@ -465,6 +585,11 @@
                 needed, an ImageUpdateOnLiveImageException will be raised.
                 If None, a new boot environment will be created only if needed.
 
+                'repos' is a list of URI strings or RepositoryURI objects that
+                represent the locations of additional sources of package data to
+                use during the planned operation.  All API functions called
+                while a plan is still active will use this package data.
+
                 This function returns a boolean indicating whether there is
                 anything to do."""
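
A minimal sketch of the lifecycle documented above, assuming an existing
ImageInterface instance api_inst (package name and URIs are illustrative;
'repos' entries may be plain URI strings or RepositoryURI objects):

        if api_inst.plan_install(["web/browser/firefox"],
            repos=["file:///var/tmp/extra.p5p", "http://pkg.example.com/"]):
                api_inst.prepare()
                api_inst.execute_plan()
        api_inst.reset()
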
 
@@ -473,26 +598,28 @@
                         if refresh_catalogs:
                                 self.__refresh_publishers()
 
-                        self.__img.make_install_plan(pkg_list,
+                        self.__set_img_alt_sources(repos)
+                        self._img.make_install_plan(pkg_list,
                             self.__progresstracker,
                             self.__check_cancelation, noexecute,
                             reject_list=reject_list)
 
-                        assert self.__img.imageplan
-
-                        self.__disable_cancel()
+                        assert self._img.imageplan
+
+                        self._disable_cancel()
 
                         if not noexecute:
                                 self.__plan_type = self.__INSTALL
 
                         self.__set_new_be()
 
-                        self.__plan_desc = PlanDescription(self.__img, self.__new_be)
-                        if self.__img.imageplan.nothingtodo() or noexecute:
+                        self.__plan_desc = PlanDescription(self._img,
+                            self.__new_be)
+                        if self._img.imageplan.nothingtodo() or noexecute:
                                 self.log_operation_end(
                                     result=history.RESULT_NOTHING_TO_DO)
 
-                        self.__img.imageplan.update_index = update_index
+                        self._img.imageplan.update_index = update_index
                 except:
                         self.__plan_common_exception(log_op_end=[
                             apx.CanceledException, fmri.IllegalFmri,
@@ -500,7 +627,7 @@
                         # NOTREACHED
 
                 self.__plan_common_finish()
-                res = not self.__img.imageplan.nothingtodo()
+                res = not self._img.imageplan.nothingtodo()
                 return res
 
         def plan_uninstall(self, pkg_list, recursive_removal, noexecute=False,
@@ -508,6 +635,8 @@
                 """Constructs a plan to remove the packages provided in
                 pkg_list.  Once an operation has been planned, it may be
                 executed by first calling prepare(), and then execute_plan().
+                After execution of a plan, or to abandon a plan, reset() should
+                be called.
 
                 'pkg_list' is a list of packages to uninstall.
 
@@ -524,39 +653,41 @@
                     be_name)
 
                 try:
-                        self.__img.make_uninstall_plan(pkg_list,
+                        self._img.make_uninstall_plan(pkg_list,
                             recursive_removal, self.__progresstracker,
                             self.__check_cancelation, noexecute)
 
-                        assert self.__img.imageplan
-
-                        self.__disable_cancel()
+                        assert self._img.imageplan
+
+                        self._disable_cancel()
 
                         if not noexecute:
                                 self.__plan_type = self.__UNINSTALL
 
                         self.__set_new_be()
 
-                        self.__plan_desc = PlanDescription(self.__img,
+                        self.__plan_desc = PlanDescription(self._img,
                             self.__new_be)
                         if noexecute:
                                 self.log_operation_end(
                                     result=history.RESULT_NOTHING_TO_DO)
-                        self.__img.imageplan.update_index = update_index
+                        self._img.imageplan.update_index = update_index
                 except:
                         self.__plan_common_exception()
                         # NOTREACHED
 
                 self.__plan_common_finish()
-                res = not self.__img.imageplan.nothingtodo()
+                res = not self._img.imageplan.nothingtodo()
                 return res
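
A hedged dry-run sketch for the uninstall path above ('noexecute' plans
without changing the image; names are illustrative):

        if api_inst.plan_uninstall(["editor/vim"], recursive_removal=False,
            noexecute=True):
                plan_desc = api_inst.describe()  # inspect the proposed plan
        api_inst.reset()
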
 
         def plan_update(self, pkg_list, refresh_catalogs=True,
             reject_list=misc.EmptyI, noexecute=False, update_index=True,
-            be_name=None, new_be=False):
+            be_name=None, new_be=False, repos=None):
                 """Constructs a plan to update the packages provided in
                 pkg_list.  Once an operation has been planned, it may be
                 executed by first calling prepare(), and then execute_plan().
+                After execution of a plan, or to abandon a plan, reset() should
+                be called.
 
                 'pkg_list' is a list of packages to update.
 
@@ -572,26 +703,27 @@
                         if refresh_catalogs:
                                 self.__refresh_publishers()
 
-                        self.__img.make_update_plan(self.__progresstracker,
+                        self.__set_img_alt_sources(repos)
+                        self._img.make_update_plan(self.__progresstracker,
                             self.__check_cancelation, noexecute,
                             pkg_list=pkg_list, reject_list=reject_list)
 
-                        assert self.__img.imageplan
-
-                        self.__disable_cancel()
+                        assert self._img.imageplan
+
+                        self._disable_cancel()
 
                         if not noexecute:
                                 self.__plan_type = self.__UPDATE
 
                         self.__set_new_be()
 
-                        self.__plan_desc = PlanDescription(self.__img,
+                        self.__plan_desc = PlanDescription(self._img,
                             self.__new_be)
-                        if self.__img.imageplan.nothingtodo() or noexecute:
+                        if self._img.imageplan.nothingtodo() or noexecute:
                                 self.log_operation_end(
                                     result=history.RESULT_NOTHING_TO_DO)
 
-                        self.__img.imageplan.update_index = update_index
+                        self._img.imageplan.update_index = update_index
                 except:
                         self.__plan_common_exception(log_op_end=[
                             apx.CanceledException, fmri.IllegalFmri,
@@ -599,7 +731,7 @@
                         # NOTREACHED
 
                 self.__plan_common_finish()
-                res = not self.__img.imageplan.nothingtodo()
+                res = not self._img.imageplan.nothingtodo()
                 return res
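
As above, but updating specific packages while consulting a temporary
source (a hedged sketch; URI and package name are illustrative):

        if api_inst.plan_update(["library/libxml2"],
            repos=["http://staging.example.com/"]):
                api_inst.prepare()
                api_inst.execute_plan()
        api_inst.reset()
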
 
         def __is_pkg5_native_packaging(self):
@@ -609,20 +741,20 @@
 
                 # First check to see if the special package "release/name"
                 # exists and contains metadata saying this is Solaris.
-                results = self.get_pkg_list(self.LIST_INSTALLED,
+                results = self.__get_pkg_list(self.LIST_INSTALLED,
                     patterns=["release/name"], return_fmris=True)
                 results = [e for e in results]
                 if results:
                         pfmri, summary, categories, states = \
                             results[0]
-                        mfst = self.__img.get_manifest(pfmri)
+                        mfst = self._img.get_manifest(pfmri)
                         osname = mfst.get("pkg.release.osname", None)
                         if osname == "sunos":
                                 return True
 
                 # Otherwise, see if we can find package/pkg (or SUNWipkg) and
                 # SUNWcs.
-                results = self.get_pkg_list(self.LIST_INSTALLED,
+                results = self.__get_pkg_list(self.LIST_INSTALLED,
                     patterns=["pkg:/package/pkg", "SUNWipkg", "SUNWcs"])
                 installed = set(e[0][1] for e in results)
                 if "SUNWcs" in installed and ("SUNWipkg" in installed or
@@ -633,11 +765,12 @@
 
         def plan_update_all(self, refresh_catalogs=True,
             reject_list=misc.EmptyI, noexecute=False, force=False,
-            update_index=True, be_name=None, new_be=True):
+            update_index=True, be_name=None, new_be=True, repos=None):
                 """Constructs a plan to update all packages on the system
                 to the latest known versions.  Once an operation has been
                 planned, it may be executed by first calling prepare(), and
-                then execute_plan().
+                then execute_plan().  After execution of a plan, or to abandon
+                a plan, reset() should be called.
 
                 'force' indicates whether update should skip the package
                 system up to date check.
@@ -659,7 +792,7 @@
 
                         if opensolaris_image and not force:
                                 try:
-                                        if not self.__img.ipkg_is_up_to_date(
+                                        if not self._img.ipkg_is_up_to_date(
                                             self.__check_cancelation,
                                             noexecute,
                                             refresh_allowed=refresh_catalogs,
@@ -670,25 +803,26 @@
                                         # case; so proceed.
                                         pass
 
-                        self.__img.make_update_plan(self.__progresstracker,
+                        self.__set_img_alt_sources(repos)
+                        self._img.make_update_plan(self.__progresstracker,
                             self.__check_cancelation, noexecute,
                             reject_list=reject_list)
 
-                        assert self.__img.imageplan
-
-                        self.__disable_cancel()
+                        assert self._img.imageplan
+
+                        self._disable_cancel()
 
                         if not noexecute:
                                 self.__plan_type = self.__UPDATE
                         self.__set_new_be()
 
-                        self.__plan_desc = PlanDescription(self.__img,
+                        self.__plan_desc = PlanDescription(self._img,
                             self.__new_be)
 
-                        if self.__img.imageplan.nothingtodo() or noexecute:
+                        if self._img.imageplan.nothingtodo() or noexecute:
                                 self.log_operation_end(
                                     result=history.RESULT_NOTHING_TO_DO)
-                        self.__img.imageplan.update_index = update_index
+                        self._img.imageplan.update_index = update_index
 
                 except:
                         self.__plan_common_exception(
@@ -696,13 +830,14 @@
                         # NOTREACHED
 
                 self.__plan_common_finish()
-                res = not self.__img.imageplan.nothingtodo()
+                res = not self._img.imageplan.nothingtodo()
                 return res, opensolaris_image
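
Note the two-value return above; a minimal hedged sketch (api_inst assumed):

        changed, opensolaris_image = api_inst.plan_update_all(new_be=True)
        if changed:
                api_inst.prepare()
                api_inst.execute_plan()
        api_inst.reset()
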
 
         def plan_change_varcets(self, variants=None, facets=None,
-            noexecute=False, be_name=None, new_be=None):
+            noexecute=False, be_name=None, new_be=None, repos=None):
                 """Creates a plan to change the specified variants and/or facets
-                for the image.
+                for the image.  After execution of a plan, or to abandon a plan,
+                reset() should be called.
 
                 'variants' is a dict of the variants to change the values of.
 
@@ -722,21 +857,22 @@
                 try:
                         self.__refresh_publishers()
 
-                        self.__img.image_change_varcets(variants,
+                        self.__set_img_alt_sources(repos)
+                        self._img.image_change_varcets(variants,
                             facets, self.__progresstracker,
                             self.__check_cancelation, noexecute)
 
-                        assert self.__img.imageplan
+                        assert self._img.imageplan
                         self.__set_new_be()
 
-                        self.__disable_cancel()
+                        self._disable_cancel()
 
                         if not noexecute:
                                 self.__plan_type = self.__VARCET
 
-                        self.__plan_desc = PlanDescription(self.__img, self.__new_be)
-
-                        if self.__img.imageplan.nothingtodo() or noexecute:
+                        self.__plan_desc = PlanDescription(self._img, self.__new_be)
+
+                        if self._img.imageplan.nothingtodo() or noexecute:
                                 self.log_operation_end(
                                     result=history.RESULT_NOTHING_TO_DO)
 
@@ -744,14 +880,14 @@
                         # We always rebuild the search index after a
                         # variant change
                         #
-                        self.__img.imageplan.update_index = True
+                        self._img.imageplan.update_index = True
 
                 except:
                         self.__plan_common_exception()
                         # NOTREACHED
 
                 self.__plan_common_finish()
-                res = not self.__img.imageplan.nothingtodo()
+                res = not self._img.imageplan.nothingtodo()
                 return res
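
A hedged sketch of a variant/facet change using the method above (variant
and facet names are illustrative):

        if api_inst.plan_change_varcets(variants={"variant.arch": "i386"},
            facets={"facet.doc.man": False}):
                api_inst.prepare()
                api_inst.execute_plan()
        api_inst.reset()
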
 
         def plan_revert(self, args, tagged=False, noexecute=True, be_name=None,
@@ -765,27 +901,27 @@
 
                 self.__plan_common_start("revert", noexecute, new_be, be_name)
                 try:
-                        self.__img.make_revert_plan(args,
+                        self._img.make_revert_plan(args,
                             tagged,
                             self.__progresstracker,
                             self.__check_cancelation,
                             noexecute)
 
-                        assert self.__img.imageplan
-
-                        self.__disable_cancel()
+                        assert self._img.imageplan
+
+                        self._disable_cancel()
 
                         if not noexecute:
                                 self.__plan_type = self.__REVERT
 
                         self.__set_new_be()
 
-                        self.__plan_desc = PlanDescription(self.__img, self.__new_be)
-                        if self.__img.imageplan.nothingtodo() or noexecute:
+                        self.__plan_desc = PlanDescription(self._img, self.__new_be)
+                        if self._img.imageplan.nothingtodo() or noexecute:
                                 self.log_operation_end(
                                     result=history.RESULT_NOTHING_TO_DO)
 
-                        self.__img.imageplan.update_index = False
+                        self._img.imageplan.update_index = False
                 except:
                         self.__plan_common_exception(log_op_end=[
                             apx.CanceledException, fmri.IllegalFmri,
@@ -793,7 +929,7 @@
                         # NOTREACHED
 
                 self.__plan_common_finish()
-                res = not self.__img.imageplan.nothingtodo()
+                res = not self._img.imageplan.nothingtodo()
                 return res
 
         def describe(self):
@@ -804,19 +940,21 @@
 
         def prepare(self):
                 """Takes care of things which must be done before the plan can
-                be executed. This includes downloading the packages to disk and
+                be executed.  This includes downloading the packages to disk and
                 preparing the indexes to be updated during execution.  Should
-                only be called once a plan_X method has been called."""
-
-                self.__acquire_activity_lock()
+                only be called once a plan_*() method has been called.  If a
+                plan is abandoned after calling this method, reset() should be
+                called."""
+
+                self._acquire_activity_lock()
                 try:
-                        self.__img.lock()
+                        self._img.lock()
                 except:
-                        self.__activity_lock.release()
+                        self._activity_lock.release()
                         raise
 
                 try:
-                        if not self.__img.imageplan:
+                        if not self._img.imageplan:
                                 raise apx.PlanMissingException()
 
                         if self.__prepared:
@@ -824,27 +962,27 @@
 
                         assert self.__plan_type in self.__valid_plan_types
 
-                        self.__enable_cancel()
+                        self._enable_cancel()
 
                         try:
-                                self.__img.imageplan.preexecute()
+                                self._img.imageplan.preexecute()
                         except search_errors.ProblematicPermissionsIndexException, e:
                                 raise apx.ProblematicPermissionsIndexException(e)
                         except:
                                 raise
 
-                        self.__disable_cancel()
+                        self._disable_cancel()
                         self.__prepared = True
                 except apx.CanceledException, e:
-                        self.__cancel_done()
-                        if self.__img.history.operation_name:
+                        self._cancel_done()
+                        if self._img.history.operation_name:
                                 # If an operation is in progress, log
                                 # the error and mark its end.
                                 self.log_operation_end(error=e)
                         raise
                 except Exception, e:
-                        self.__cancel_cleanup_exception()
-                        if self.__img.history.operation_name:
+                        self._cancel_cleanup_exception()
+                        if self._img.history.operation_name:
                                 # If an operation is in progress, log
                                 # the error and mark its end.
                                 self.log_operation_end(error=e)
@@ -852,8 +990,8 @@
                 except:
                         # Handle exceptions that are not subclasses of
                         # Exception.
-                        self.__cancel_cleanup_exception()
-                        if self.__img.history.operation_name:
+                        self._cancel_cleanup_exception()
+                        if self._img.history.operation_name:
                                 # If an operation is in progress, log
                                 # the error and mark its end.
                                 exc_type, exc_value, exc_traceback = \
@@ -861,32 +999,32 @@
                                 self.log_operation_end(error=exc_type)
                         raise
                 finally:
-                        self.__img.cleanup_downloads()
-                        self.__img.unlock()
+                        self._img.cleanup_downloads()
+                        self._img.unlock()
                         try:
                                 if int(os.environ.get("PKG_DUMP_STATS", 0)) > 0:
-                                        self.__img.transport.stats.dump()
+                                        self._img.transport.stats.dump()
                         except ValueError:
                                 # Don't generate stats if an invalid value
                                 # is supplied.
                                 pass
-                        self.__activity_lock.release()
+                        self._activity_lock.release()
 
         def execute_plan(self):
                 """Executes the plan. This is uncancelable once it begins.
                 Should only be called after the prepare method has been
-                called."""
-
-                self.__acquire_activity_lock()
+                called.  After plan execution, reset() should be called."""
+
+                self._acquire_activity_lock()
                 try:
-                        self.__disable_cancel()
-                        self.__img.lock()
+                        self._disable_cancel()
+                        self._img.lock()
                 except:
-                        self.__activity_lock.release()
+                        self._activity_lock.release()
                         raise
 
                 try:
-                        if not self.__img.imageplan:
+                        if not self._img.imageplan:
                                 raise apx.PlanMissingException()
 
                         if not self.__prepared:
@@ -898,21 +1036,21 @@
                         assert self.__plan_type in self.__valid_plan_types
 
                         try:
-                                be = bootenv.BootEnv(self.__img)
+                                be = bootenv.BootEnv(self._img)
                         except RuntimeError:
-                                be = bootenv.BootEnvNull(self.__img)
-                        self.__img.bootenv = be
+                                be = bootenv.BootEnvNull(self._img)
+                        self._img.bootenv = be
 
                         if self.__new_be == False and \
-                            self.__img.imageplan.reboot_needed() and \
-                            self.__img.is_liveroot():
+                            self._img.imageplan.reboot_needed() and \
+                            self._img.is_liveroot():
                                 e = apx.RebootNeededOnLiveImageException()
                                 self.log_operation_end(error=e)
                                 raise e
 
                         if self.__new_be == True:
                                 try:
-                                        be.init_image_recovery(self.__img,
+                                        be.init_image_recovery(self._img,
                                             self.__be_name)
                                 except Exception, e:
                                         self.log_operation_end(error=e)
@@ -925,12 +1063,12 @@
                                         self.log_operation_end(error=exc_type)
                                         raise
                                 # check if things gained underneath us
-                                if self.__img.is_liveroot():
+                                if self._img.is_liveroot():
                                         e = apx.UnableToCopyBE()
                                         self.log_operation_end(error=e)
                                         raise e
                         try:
-                                self.__img.imageplan.execute()
+                                self._img.imageplan.execute()
                         except RuntimeError, e:
                                 if self.__new_be == True:
                                         be.restore_image()
@@ -984,25 +1122,25 @@
 
                         self.__finished_execution(be)
                 finally:
-                        self.__img.cleanup_downloads()
-                        if self.__img.locked:
-                                self.__img.unlock()
-                        self.__activity_lock.release()
+                        self._img.cleanup_downloads()
+                        if self._img.locked:
+                                self._img.unlock()
+                        self._activity_lock.release()
 
         def __finished_execution(self, be):
-                if self.__img.imageplan.state != EXECUTED_OK:
+                if self._img.imageplan.state != EXECUTED_OK:
                         if self.__new_be == True:
                                 be.restore_image()
                         else:
                                 be.restore_install_uninstall()
 
                         error = apx.ImageplanStateException(
-                            self.__img.imageplan.state)
+                            self._img.imageplan.state)
                         # Must be done after bootenv restore.
                         self.log_operation_end(error=error)
                         raise error
 
-                if self.__img.imageplan.boot_archive_needed() or \
+                if self._img.imageplan.boot_archive_needed() or \
                     self.__new_be:
                         be.update_boot_archive()
 
@@ -1010,11 +1148,11 @@
                         be.activate_image()
                 else:
                         be.activate_install_uninstall()
-                self.__img.cleanup_cached_content()
+                self._img.cleanup_cached_content()
                 # If the end of the operation wasn't already logged
                 # by one of the previous operations, then log it as
                 # ending now.
-                if self.__img.history.operation_name:
+                if self._img.history.operation_name:
                         self.log_operation_end()
                 self.__executed = True
 
@@ -1035,25 +1173,25 @@
                         False   sets displayed status to False
                         True    sets displayed status to True"""
 
-                self.__acquire_activity_lock()
+                self._acquire_activity_lock()
                 try:
                         try:
-                                self.__disable_cancel()
+                                self._disable_cancel()
                         except apx.CanceledException:
-                                self.__cancel_done()
+                                self._cancel_done()
                                 raise
 
-                        if not self.__img.imageplan:
+                        if not self._img.imageplan:
                                 raise apx.PlanMissingException()
 
-                        for pp in self.__img.imageplan.pkg_plans:
+                        for pp in self._img.imageplan.pkg_plans:
                                 if pp.destination_fmri == pfmri:
                                         pp.set_license_status(plicense,
                                             accepted=accepted,
                                             displayed=displayed)
                                         break
                 finally:
-                        self.__activity_lock.release()
+                        self._activity_lock.release()
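
For the status values documented above (None leaves a field unchanged), a
hedged sketch; pfmri and plicense stand in for values obtained from the
plan's license information:

        api_inst.set_plan_license_status(pfmri, plicense, accepted=True,
            displayed=True)
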
 
         def refresh(self, full_refresh=False, pubs=None, immediate=False):
                 """Refreshes the metadata (e.g. catalog) for one or more
@@ -1077,35 +1215,35 @@
                 Currently returns an image object, allowing existing code to
                 work while the rest of the API is put into place."""
 
-                self.__acquire_activity_lock()
+                self._acquire_activity_lock()
                 try:
-                        self.__disable_cancel()
-                        self.__img.lock()
+                        self._disable_cancel()
+                        self._img.lock()
                         try:
                                 self.__refresh(full_refresh=full_refresh,
                                     pubs=pubs, immediate=immediate)
-                                return self.__img
+                                return self._img
                         finally:
-                                self.__img.unlock()
-                                self.__img.cleanup_downloads()
+                                self._img.unlock()
+                                self._img.cleanup_downloads()
                 except apx.CanceledException:
-                        self.__cancel_done()
+                        self._cancel_done()
                         raise
                 finally:
                         try:
                                 if int(os.environ.get("PKG_DUMP_STATS", 0)) > 0:
-                                        self.__img.transport.stats.dump()
+                                        self._img.transport.stats.dump()
                         except ValueError:
                                 # Don't generate stats if an invalid value
                                 # is supplied.
                                 pass
-                        self.__activity_lock.release()
+                        self._activity_lock.release()
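
A hedged sketch of the refresh entry point above (the publisher prefix is
illustrative):

        api_inst.refresh(full_refresh=True, pubs=["example.com"],
            immediate=True)
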
 
         def __refresh(self, full_refresh=False, pubs=None, immediate=False):
                 """Private refresh method; caller responsible for locking and
                 cleanup."""
 
-                self.__img.refresh_publishers(full_refresh=full_refresh,
+                self._img.refresh_publishers(full_refresh=full_refresh,
                     immediate=immediate, pubs=pubs,
                     progtrack=self.__progresstracker)
 
@@ -1115,11 +1253,13 @@
                 license_lst = []
                 for lic in mfst.gen_actions_by_type("license"):
                         license_lst.append(LicenseInfo(pfmri, lic,
-                            img=self.__img))
+                            img=self._img))
                 return license_lst
 
-        def get_pkg_categories(self, installed=False, pubs=misc.EmptyI):
-                """Returns an order list of tuples of the form (scheme,
+        @_LockedCancelable()
+        def get_pkg_categories(self, installed=False, pubs=misc.EmptyI,
+            repos=None):
+                """Returns an ordered list of tuples of the form (scheme,
                 category) containing the names of all categories in use by
                 the last version of each unique package in the catalog on a
                 per-publisher basis.
@@ -1131,27 +1271,42 @@
                 instead.
 
                 'pubs' is an optional list of publisher prefixes to restrict
-                the results to."""
+                the results to.
+
+                'repos' is a list of URI strings or RepositoryURI objects that
+                represent the locations of package repositories to list packages
+                for.
+                """
 
                 if installed:
-                        img_cat = self.__img.get_catalog(
-                            self.__img.IMG_CATALOG_INSTALLED)
                         excludes = misc.EmptyI
                 else:
-                        img_cat = self.__img.get_catalog(
-                            self.__img.IMG_CATALOG_KNOWN)
-                        excludes = self.__img.list_excludes()
-                return sorted(img_cat.categories(excludes=excludes, pubs=pubs))
-
-        def __map_installed_newest(self, brelease, pubs):
+                        excludes = self._img.list_excludes()
+
+                if repos:
+                        ignored, ignored, known_cat, inst_cat = \
+                            self.__get_alt_pkg_data(repos)
+                        if installed:
+                                pkg_cat = inst_cat
+                        else:
+                                pkg_cat = known_cat
+                elif installed:
+                        pkg_cat = self._img.get_catalog(
+                            self._img.IMG_CATALOG_INSTALLED)
+                else:
+                        pkg_cat = self._img.get_catalog(
+                            self._img.IMG_CATALOG_KNOWN)
+                return sorted(pkg_cat.categories(excludes=excludes, pubs=pubs))
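
A hedged sketch of listing categories from a temporary source via the
'repos' parameter above (URI is illustrative):

        for scheme, cat in api_inst.get_pkg_categories(
            repos=["http://pkg.example.com/"]):
                print "%s (%s)" % (cat, scheme)
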
+
+        def __map_installed_newest(self, brelease, pubs, known_cat=None):
                 """Private function.  Maps incorporations and publisher
                 relationships for installed packages and returns them
                 as a tuple of (pub_ranks, inc_stems, inc_vers, inst_stems,
                 ren_stems, ren_inst_stems).
                 """
 
-                img_cat = self.__img.get_catalog(
-                    self.__img.IMG_CATALOG_INSTALLED)
+                img_cat = self._img.get_catalog(
+                    self._img.IMG_CATALOG_INSTALLED)
                 cat_info = frozenset([img_cat.DEPENDENCY])
 
                 inst_stems = {}
@@ -1161,7 +1316,7 @@
                 inc_stems = {}
                 inc_vers = {}
 
-                pub_ranks = self.__img.get_publisher_ranks()
+                pub_ranks = self._img.get_publisher_ranks()
 
                 # The incorporation list should include all installed,
                 # incorporated packages from all publishers.
@@ -1254,11 +1409,13 @@
                         # Package should not be checked.
                         return False
 
-                img_cat = self.__img.get_catalog(self.__img.IMG_CATALOG_KNOWN)
+                if not known_cat:
+                        known_cat = self._img.get_catalog(
+                            self._img.IMG_CATALOG_KNOWN)
 
                 # Find terminal rename entry for all known packages not
                 # rejected by check_stem().
-                for t, entry, actions in img_cat.entry_actions(cat_info,
+                for t, entry, actions in known_cat.entry_actions(cat_info,
                     cb=check_stem, last=True):
                         pkgr = False
                         targets = set()
@@ -1303,16 +1460,372 @@
                 for p in sorted(pub_ranks, cmp=pub_order):
                         if pubs and p not in pubs:
                                 continue
-                        for stem in img_cat.names(pubs=[p]):
+                        for stem in known_cat.names(pubs=[p]):
                                 if stem in inc_vers:
                                         inc_stems.setdefault(stem, p)
 
                 return (pub_ranks, inc_stems, inc_vers, inst_stems, ren_stems,
                     ren_inst_stems)
 
+        def __get_temp_repo_pubs(self, repos):
+                """Private helper function to retrieve publisher information
+                from a list of temporary repositories.  Caller is responsible
+                for locking."""
+
+                ret_pubs = []
+                for repo_uri in repos:
+                        if isinstance(repo_uri, basestring):
+                                repo = publisher.RepositoryURI(repo_uri)
+                        else:
+                                # Already a RepositoryURI.
+                                repo = repo_uri
+
+                        pubs = None
+                        try:
+                                pubs = self._img.transport.get_publisherdata(
+                                    repo, ccancel=self.__check_cancelation)
+                        except apx.UnsupportedRepositoryOperation:
+                                raise apx.RepoPubConfigUnavailable(
+                                    location=str(repo))
+
+                        if not pubs:
+                                # Empty repository configuration.
+                                raise apx.RepoPubConfigUnavailable(
+                                    location=str(repo))
+
+                        for p in pubs:
+                                psrepo = p.selected_repository
+                                if not psrepo:
+                                        # Repository configuration info wasn't
+                                        # provided, so assume origin is
+                                        # repo_uri.
+                                        p.add_repository(publisher.Repository(
+                                            origins=[repo_uri]))
+                                elif not psrepo.origins:
+                                        # Repository configuration was provided,
+                                        # but without an origin.  Assume the
+                                        # repo_uri is the origin.
+                                        psrepo.add_origin(repo_uri)
+                                elif repo not in psrepo.origins:
+                                        # If the repo_uri used is not
+                                        # in the list of sources, then
+                                        # add it as the first origin.
+                                        psrepo.origins.insert(0, repo)
+                        ret_pubs.extend(pubs)
+
+                return sorted(ret_pubs)
+
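
The origin-defaulting rules above, restated as a hedged sketch using the
publisher objects this module already imports (values are illustrative,
and constructing a bare Publisher this way is an assumption):

        import pkg.client.publisher as publisher

        repo = publisher.RepositoryURI("http://pkg.example.com/")
        p = publisher.Publisher("example.com")
        if not p.selected_repository:
                # No repository configured at all: repo becomes the origin.
                p.add_repository(publisher.Repository(origins=[repo]))
        elif not p.selected_repository.origins:
                # Repository configured, but without origins.
                p.selected_repository.add_origin(repo)
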
+        def __get_alt_pkg_data(self, repos):
+                """Private helper function to retrieve composite known and
+                installed catalog and package repository map for temporary
+                set of package repositories.  Returns (pkg_pub_map, alt_pubs,
+                known_cat, inst_cat)."""
+
+                repos = set(repos)
+                eid = ",".join(sorted(map(str, repos)))
+                try:
+                        return self.__alt_sources[eid]
+                except KeyError:
+                        # Need to cache new set of alternate sources.
+                        pass
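                # The cache key 'eid' above is just the sorted, comma-joined
                # string forms of the URIs, e.g.
                #     set(["http://b.example.com/", "http://a.example.com/"])
                # maps to "http://a.example.com/,http://b.example.com/", so
                # any ordering of the same sources reuses one cache entry.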
+
+                img_inst_cat = self._img.get_catalog(
+                    self._img.IMG_CATALOG_INSTALLED)
+                op_time = datetime.datetime.utcnow()
+                pubs = self.__get_temp_repo_pubs(repos)
+                progtrack = self.__progresstracker
+
+                # Create temporary directories.
+                tmpdir = tempfile.mkdtemp()
+
+                pkg_repos = {}
+                pkg_pub_map = {}
+                try:
+                        progtrack.refresh_start(len(pubs))
+                        pub_cats = []
+                        for pub in pubs:
+                                # Assign a temporary meta root to each
+                                # publisher.
+                                meta_root = os.path.join(tmpdir, str(id(pub)))
+                                misc.makedirs(meta_root)
+                                pub.meta_root = meta_root
+                                pub.transport = self._img.transport
+                                repo = pub.selected_repository
+                                pkg_repos[id(repo)] = repo
+
+                                # Retrieve each publisher's catalog.
+                                progtrack.refresh_progress(pub.prefix)
+                                pub.refresh()
+                                pub_cats.append((
+                                    pub.prefix,
+                                    repo,
+                                    pub.catalog
+                                ))
+
+                        progtrack.refresh_done()
+
+                        # Determine upgradability.
+                        newest = {}
+                        for pfx, repo, cat in [(None, None, img_inst_cat)] + \
+                            pub_cats:
+                                if pfx:
+                                        pkg_list = cat.fmris(last=True,
+                                            pubs=[pfx])
+                                else:
+                                        pkg_list = cat.fmris(last=True)
+
+                                for f in pkg_list:
+                                        nver, snver = newest.get(f.pkg_name,
+                                            (None, None))
+                                        if f.version > nver:
+                                                newest[f.pkg_name] = (f.version,
+                                                    str(f.version))
+
+                        # Build list of installed packages.
+                        inst_stems = {}
+                        for t, entry in img_inst_cat.tuple_entries():
+                                states = entry["metadata"]["states"]
+                                if self._img.PKG_STATE_INSTALLED not in states:
+                                        continue
+                                pub, stem, ver = t
+                                inst_stems.setdefault(pub, {})
+                                inst_stems[pub].setdefault(stem, {})
+                                inst_stems[pub][stem][ver] = False
+
+                        # Now create composite known and installed catalogs.
+                        compicat = pkg.catalog.Catalog(batch_mode=True,
+                            sign=False)
+                        compkcat = pkg.catalog.Catalog(batch_mode=True,
+                            sign=False)
+
+                        sparts = (
+                           (pfx, cat, repo, name, cat.get_part(name, must_exist=True))
+                           for pfx, repo, cat in pub_cats
+                           for name in cat.parts
+                        )
+
+                        excludes = self._img.list_excludes()
+                        proc_stems = {}
+                        for pfx, cat, repo, name, spart in sparts:
+                                # 'spart' is the source part.
+                                if spart is None:
+                                        # Client hasn't retrieved this part.
+                                        continue
+
+                                # New known part.
+                                nkpart = compkcat.get_part(name)
+                                nipart = compicat.get_part(name)
+                                base = name.startswith("catalog.base.")
+
+                                # Avoid accessor overhead since these will be
+                                # used for every entry.
+                                cat_ver = cat.version
+                                dp = cat.get_part("catalog.dependency.C",
+                                    must_exist=True)
+
+                                for t, sentry in spart.tuple_entries(pubs=[pfx]):
+                                        pub, stem, ver = t
+
+                                        pkg_pub_map.setdefault(pub, {})
+                                        pkg_pub_map[pub].setdefault(stem, {})
+                                        pkg_pub_map[pub][stem].setdefault(ver,
+                                            set())
+                                        pkg_pub_map[pub][stem][ver].add(
+                                            id(repo))
+
+                                        if pub in proc_stems and \
+                                            stem in proc_stems[pub] and \
+                                            ver in proc_stems[pub][stem]:
+                                                if id(cat) != proc_stems[pub][stem][ver]:
+                                                        # Already added from another
+                                                        # catalog.
+                                                        continue
+                                        else:
+                                                proc_stems.setdefault(pub, {})
+                                                proc_stems[pub].setdefault(stem,
+                                                    {})
+                                                proc_stems[pub][stem][ver] = \
+                                                    id(cat)
+
+                                        installed = False
+                                        if pub in inst_stems and \
+                                            stem in inst_stems[pub] and \
+                                            ver in inst_stems[pub][stem]:
+                                                installed = True
+                                                inst_stems[pub][stem][ver] = \
+                                                    True
+
+                                        # copy() is too slow here and catalog
+                                        # entries are shallow so this should be
+                                        # sufficient.
+                                        entry = dict(sentry.iteritems())
+                                        if not base:
+                                                # Nothing else to do except add
+                                                # the entry for non-base catalog
+                                                # parts.
+                                                nkpart.add(metadata=entry,
+                                                    op_time=op_time, pub=pub,
+                                                    stem=stem, ver=ver)
+                                                if installed:
+                                                        nipart.add(
+                                                            metadata=entry,
+                                                            op_time=op_time,
+                                                            pub=pub, stem=stem,
+                                                            ver=ver)
+                                                continue
+
+                                        # Only the base catalog part stores
+                                        # package state information and/or
+                                        # other metadata.
+                                        mdata = entry["metadata"] = {}
+                                        states = [self._img.PKG_STATE_KNOWN,
+                                            self._img.PKG_STATE_ALT_SOURCE]
+                                        if cat_ver == 0:
+                                                states.append(
+                                                    self._img.PKG_STATE_V0)
+                                        else:
+                                                # Assume V1 catalog source.
+                                                states.append(
+                                                    self._img.PKG_STATE_V1)
+
+                                        if installed:
+                                                states.append(
+                                                    self._img.PKG_STATE_INSTALLED)
+
+                                        nver, snver = newest.get(stem,
+                                            (None, None))
+                                        if snver is not None and ver != snver:
+                                                states.append(
+                                                    self._img.PKG_STATE_UPGRADABLE)
+
+                                        # Determine if package is obsolete or
+                                        # has been renamed and mark with
+                                        # appropriate state.
+                                        dpent = None
+                                        if dp is not None:
+                                                dpent = dp.get_entry(pub=pub,
+                                                    stem=stem, ver=ver)
+                                        if dpent is not None:
+                                                for a in dpent["actions"]:
+                                                        # Constructing action
+                                                        # objects for every
+                                                        # action would be a lot
+                                                        # slower, so a simple
+                                                        # string match is done
+                                                        # first so that only
+                                                        # interesting actions
+                                                        # get constructed.
+                                                        if not a.startswith("set"):
+                                                                continue
+                                                        if not ("pkg.obsolete" in a or \
+                                                            "pkg.renamed" in a):
+                                                                continue
+
+                                                        try:
+                                                                act = pkg.actions.fromstr(a)
+                                                        except pkg.actions.ActionError:
+                                                                # If the action can't be
+                                                                # parsed or is not yet
+                                                                # supported, continue.
+                                                                continue
+
+                                                        if act.attrs["value"].lower() != "true":
+                                                                continue
+
+                                                        if act.attrs["name"] == "pkg.obsolete":
+                                                                states.append(
+                                                                    self._img.PKG_STATE_OBSOLETE)
+                                                        elif act.attrs["name"] == "pkg.renamed":
+                                                                if not act.include_this(
+                                                                    excludes):
+                                                                        continue
+                                                                states.append(
+                                                                    self._img.PKG_STATE_RENAMED)
+
+                                        mdata["states"] = states
+
+                                        # Add base entries.
+                                        nkpart.add(metadata=entry,
+                                            op_time=op_time, pub=pub, stem=stem,
+                                            ver=ver)
+                                        if installed:
+                                                nipart.add(metadata=entry,
+                                                    op_time=op_time, pub=pub,
+                                                    stem=stem, ver=ver)
+
+                        # Build a unique set of publisher objects so that
+                        # signing information can be consolidated and
+                        # used.  (If this isn't done, signed packages
+                        # can't be installed from temporary sources.)
+                        pub_map = {}
+                        for pub in pubs:
+                                try:
+                                        opub = pub_map[pub.prefix]
+                                except KeyError:
+                                        opub = publisher.Publisher(pub.prefix,
+                                            catalog=compkcat)
+                                        pub_map[pub.prefix] = opub
+
+                                for attr in ("signing_ca_certs",
+                                    "intermediate_certs"):
+                                        getattr(opub, attr).extend(
+                                            getattr(pub, attr))
+
+                        rid_map = {}
+                        for pub in pkg_pub_map:
+                                for stem in pkg_pub_map[pub]:
+                                        for ver in pkg_pub_map[pub][stem]:
+                                                rids = tuple(sorted(
+                                                    pkg_pub_map[pub][stem][ver]))
+
+                                                if rids not in rid_map:
+                                                        # Create a publisher and
+                                                        # repository for this
+                                                        # unique set of origins.
+                                                        origins = []
+                                                        map(origins.extend, [
+                                                           pkg_repos.get(rid).origins
+                                                           for rid in rids
+                                                        ])
+                                                        nrepo = publisher.Repository(
+                                                            origins=origins)
+                                                        npub = \
+                                                            copy.copy(pub_map[pub])
+                                                        npub.add_repository(nrepo)
+                                                        rid_map[rids] = npub
+
+                                                pkg_pub_map[pub][stem][ver] = \
+                                                    rid_map[rids]
+
+                        # Now consolidate all origins for each publisher under
+                        # a single repository object for the caller.
+                        for pub in pubs:
+                                npub = pub_map[pub.prefix]
+                                nrepo = npub.selected_repository
+                                if not nrepo:
+                                        nrepo = publisher.Repository()
+                                        npub.add_repository(nrepo)
+                                for o in pub.selected_repository.origins:
+                                        if not nrepo.has_origin(o):
+                                                nrepo.add_origin(o)
+
+                        for compcat in (compicat, compkcat):
+                                compcat.batch_mode = False
+                                compcat.finalize()
+                                compcat.read_only = True
+
+                        # Cache these for future callers.
+                        self.__alt_sources[eid] = (pkg_pub_map,
+                            sorted(pub_map.values()), compkcat, compicat)
+                        return self.__alt_sources[eid]
+                finally:
+                        shutil.rmtree(tmpdir, ignore_errors=True)
+                        self._img.cleanup_downloads()
+
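
The @_LockedGenerator() decorator applied below, and the
@_LockedCancelable() decorator applied to info() and get_publisherdata()
later in this file, are introduced elsewhere in this changeset; their
definitions fall outside these hunks.  They factor out the activity-lock
and cancellation boilerplate that remote_search() previously carried
inline (see the block removed from remote_search() further down).  A
rough sketch of the generator variant, assuming only the
_acquire_activity_lock(), _enable_cancel(), _disable_cancel(),
_cancel_done() and _cancel_cleanup_exception() members visible in this
diff; the actual implementation may differ:

        def _LockedGenerator(*d_args, **d_kwargs):
                # Decorator factory: wraps a generator method so that
                # the activity lock is held and cancellation state is
                # managed for the generator's entire lifetime.
                def decorator(f):
                        def wrapper(self, *fargs, **f_kwargs):
                                self._acquire_activity_lock()
                                self._enable_cancel()
                                clean_exit = True
                                canceled = False
                                try:
                                        for v in f(self, *fargs,
                                            **f_kwargs):
                                                yield v
                                except GeneratorExit:
                                        return
                                except apx.CanceledException:
                                        canceled = True
                                        raise
                                except Exception:
                                        clean_exit = False
                                        raise
                                finally:
                                        if canceled:
                                                self._cancel_done()
                                        elif clean_exit:
                                                try:
                                                        self._disable_cancel()
                                                except apx.CanceledException:
                                                        self._cancel_done()
                                                        self._activity_lock.release()
                                                        raise
                                        else:
                                                self._cancel_cleanup_exception()
                                        self._activity_lock.release()
                        return wrapper
                return decorator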
+        @_LockedGenerator()
         def get_pkg_list(self, pkg_list, cats=None, patterns=misc.EmptyI,
-            pubs=misc.EmptyI, raise_unmatched=False, return_fmris=False,
-            variants=False):
+            pubs=misc.EmptyI, raise_unmatched=False, repos=None,
+            return_fmris=False, variants=False):
                 """A generator function that produces tuples of the form:
 
                     (
@@ -1371,6 +1884,10 @@
                 (after applying all other filtering and returning all results)
                 didn't match any packages.
 
+                'repos' is a list of URI strings or RepositoryURI objects that
+                represent the locations of package repositories to list packages
+                for.
+
                 'return_fmris' is an optional boolean value that indicates that
                 an FMRI object should be returned in place of the (pub, stem,
                 ver) tuple that is normally returned.
@@ -1382,6 +1899,21 @@
                 Please note that this function may invoke network operations
                 to retrieve the requested package information."""
 
+                return self.__get_pkg_list(pkg_list, cats=cats,
+                    patterns=patterns, pubs=pubs,
+                    raise_unmatched=raise_unmatched, repos=repos,
+                    return_fmris=return_fmris, variants=variants)
+
+        def __get_pkg_list(self, pkg_list, cats=None, inst_cat=None,
+            known_cat=None, patterns=misc.EmptyI, pubs=misc.EmptyI,
+            raise_unmatched=False, repos=None, return_fmris=False,
+            variants=False):
+                """This is the implementation of get_pkg_list.  The other
+                function is a wrapper that uses locking.  The separation was
+                necessary because of API functions that already perform locking
+                but need to use get_pkg_list().  This is a generator
+                function."""
+
                 installed = inst_newest = newest = upgradable = False
                 if pkg_list == self.LIST_INSTALLED:
                         installed = True
@@ -1392,7 +1924,7 @@
                 elif pkg_list == self.LIST_UPGRADABLE:
                         upgradable = True
 
-                brelease = self.__img.attrs["Build-Release"]
+                brelease = self._img.attrs["Build-Release"]
 
                 # Each pattern in patterns can be a partial or full FMRI, so
                 # extract the individual components for use in filtering.
@@ -1439,29 +1971,38 @@
                 if illegals:
                         raise apx.InventoryException(illegal=illegals)
 
+                if repos:
+                        ignored, ignored, known_cat, inst_cat = \
+                            self.__get_alt_pkg_data(repos)
+
                 # For LIST_INSTALLED_NEWEST, installed packages need to be
                 # determined and incorporation and publisher relationships
                 # mapped.
                 if inst_newest:
                         pub_ranks, inc_stems, inc_vers, inst_stems, ren_stems, \
                             ren_inst_stems = self.__map_installed_newest(
-                            brelease, pubs)
+                            brelease, pubs, known_cat=known_cat)
                 else:
                         pub_ranks = inc_stems = inc_vers = inst_stems = \
                             ren_stems = ren_inst_stems = misc.EmptyDict
 
                 if installed or upgradable:
-                        img_cat = self.__img.get_catalog(
-                            self.__img.IMG_CATALOG_INSTALLED)
+                        if inst_cat:
+                                pkg_cat = inst_cat
+                        else:
+                                pkg_cat = self._img.get_catalog(
+                                    self._img.IMG_CATALOG_INSTALLED)
 
                         # Don't need to perform variant filtering if only
                         # listing installed packages.
                         variants = True
+                elif known_cat:
+                        pkg_cat = known_cat
                 else:
-                        img_cat = self.__img.get_catalog(
-                            self.__img.IMG_CATALOG_KNOWN)
-
-                cat_info = frozenset([img_cat.DEPENDENCY, img_cat.SUMMARY])
+                        pkg_cat = self._img.get_catalog(
+                            self._img.IMG_CATALOG_KNOWN)
+
+                cat_info = frozenset([pkg_cat.DEPENDENCY, pkg_cat.SUMMARY])
 
                 # Keep track of when the newest version has been found for
                 # each incorporated stem.
@@ -1473,8 +2014,8 @@
 
                 def check_state(t, entry):
                         states = entry["metadata"]["states"]
-                        pkgi = self.__img.PKG_STATE_INSTALLED in states
-                        pkgu = self.__img.PKG_STATE_UPGRADABLE in states
+                        pkgi = self._img.PKG_STATE_INSTALLED in states
+                        pkgu = self._img.PKG_STATE_UPGRADABLE in states
                         pub, stem, ver = t
 
                         if upgradable:
@@ -1548,8 +2089,8 @@
                         # Filtering needs to be applied.
                         filter_cb = check_state
 
-                excludes = self.__img.list_excludes()
-                img_variants = self.__img.get_variants()
+                excludes = self._img.list_excludes()
+                img_variants = self._img.get_variants()
 
                 matched_pats = set()
                 pkg_matching_pats = None
@@ -1560,7 +2101,7 @@
                 # to be filtered.)
                 use_last = newest and not pat_versioned and variants
 
-                for t, entry, actions in img_cat.entry_actions(cat_info,
+                for t, entry, actions in pkg_cat.entry_actions(cat_info,
                     cb=filter_cb, excludes=excludes, last=use_last,
                     ordered=True, pubs=pubs):
                         pub, stem, ver = t
@@ -1674,7 +2215,7 @@
 
                         omit_var = False
                         states = entry["metadata"]["states"]
-                        pkgi = self.__img.PKG_STATE_INSTALLED in states
+                        pkgi = self._img.PKG_STATE_INSTALLED in states
                         try:
                                 for a in actions:
                                         if a.name == "depend" and \
@@ -1807,28 +2348,19 @@
                         if raise_unmatched and notfound:
                                 raise apx.InventoryException(notfound=notfound)
 
-        def info(self, fmri_strings, local, info_needed):
+        @_LockedCancelable()
+        def info(self, fmri_strings, local, info_needed, repos=None):
                 """Gathers information about fmris.  fmri_strings is a list
                 of fmri_names for which information is desired.  local
                 determines whether to retrieve the information locally
                 (if possible).  It returns a dictionary of lists.  The keys
                 for the dictionary are the constants specified in the class
                 definition.  The values are lists of PackageInfo objects or
-                strings."""
-
-                # Currently, this is mostly a wapper for activity locking.
-                self.__acquire_activity_lock()
-                try:
-                        i = self._info_op(fmri_strings, local, info_needed)
-                finally:
-                        self.__img.cleanup_downloads()
-                        self.__activity_lock.release()
-
-                return i
-
-        def _info_op(self, fmri_strings, local, info_needed):
-                """Performs the actual info operation.  The external
-                interface to the API's consumers is defined in info()."""
+                strings.
+
+                'repos' is a list of URI strings or RepositoryURI objects that
+                represent the locations of package repositories to return
+                package information for.
+                """
 
                 bad_opts = info_needed - PackageInfo.ALL_OPTIONS
                 if bad_opts:
@@ -1836,25 +2368,40 @@
 
                 self.log_operation_start("info")
 
-                if local is True:
-                        img_cat = self.__img.get_catalog(
-                            self.__img.IMG_CATALOG_INSTALLED)
-                        if not fmri_strings and img_cat.package_count == 0:
-                                self.log_operation_end(
-                                    result=history.RESULT_NOTHING_TO_DO)
-                                raise apx.NoPackagesInstalledException()
+                # Common logic for the image and temporary repository cases.
+                if local:
                         ilist = self.LIST_INSTALLED
                 else:
                         # Verify validity of certificates before attempting
                         # network operations.
-                        self.__cert_verify(
-                            log_op_end=[apx.CertificateError])
-
-                        img_cat = self.__img.get_catalog(
-                            self.__img.IMG_CATALOG_KNOWN)
+                        self.__cert_verify(log_op_end=[apx.CertificateError])
                         ilist = self.LIST_NEWEST
 
-                excludes = self.__img.list_excludes()
+                # The pkg_pub_map is only populated when temp repos are
+                # specified and maps packages to the repositories that
+                # contain them for manifest retrieval.
+                pkg_pub_map = None
+                known_cat = None
+                inst_cat = None
+                if repos:
+                        pkg_pub_map, ignored, known_cat, inst_cat = \
+                            self.__get_alt_pkg_data(repos)
+                        if local:
+                                pkg_cat = inst_cat
+                        else:
+                                pkg_cat = known_cat
+                elif local:
+                        pkg_cat = self._img.get_catalog(
+                            self._img.IMG_CATALOG_INSTALLED)
+                        if not fmri_strings and pkg_cat.package_count == 0:
+                                self.log_operation_end(
+                                    result=history.RESULT_NOTHING_TO_DO)
+                                raise apx.NoPackagesInstalledException()
+                else:
+                        pkg_cat = self._img.get_catalog(
+                            self._img.IMG_CATALOG_KNOWN)
+
+                excludes = self._img.list_excludes()
 
                 # Set of options that can use catalog data.
                 cat_opts = frozenset([PackageInfo.DESCRIPTION,
@@ -1872,20 +2419,28 @@
                 }
 
                 try:
-                        for pfmri, summary, cats, states in self.get_pkg_list(
-                            ilist, patterns=fmri_strings, raise_unmatched=True,
+                        for pfmri, summary, cats, states in self.__get_pkg_list(
+                            ilist, inst_cat=inst_cat, known_cat=known_cat,
+                            patterns=fmri_strings, raise_unmatched=True,
                             return_fmris=True, variants=True):
-                                pub = name = version = release = \
-                                    build_release = branch = \
+                                release = build_release = branch = \
                                     packaging_date = None
+
+                                pub, name, version = pfmri.tuple()
+                                alt_pub = None
+                                if pkg_pub_map:
+                                        alt_pub = \
+                                            pkg_pub_map[pub][name][str(version)]
+
                                 if PackageInfo.IDENTITY in info_needed:
-                                        pub, name, version = pfmri.tuple()
                                         release = version.release
                                         build_release = version.build_release
                                         branch = version.branch
                                         packaging_date = \
                                             version.get_timestamp().strftime(
                                             "%c")
+                                else:
+                                        pub = name = version = None
 
                                 links = hardlinks = files = dirs = \
                                     size = licenses = cat_info = \
@@ -1904,7 +2459,7 @@
                                         try:
                                                 ignored, description, ignored, \
                                                     dependencies = \
-                                                    _get_pkg_cat_data(img_cat,
+                                                    _get_pkg_cat_data(pkg_cat,
                                                         ret_cat_data,
                                                         excludes=excludes,
                                                         pfmri=pfmri)
@@ -1924,8 +2479,8 @@
                                     PackageInfo.LICENSES]) | act_opts) & \
                                     info_needed:
                                         try:
-                                                mfst = self.__img.get_manifest(
-                                                    pfmri)
+                                                mfst = self._img.get_manifest(
+                                                    pfmri, alt_pub=alt_pub)
                                         except apx.InvalidPackageErrors:
                                                 # If the information can't be
                                                 # retrieved because the manifest
@@ -2011,7 +2566,7 @@
                 """Returns true if the API is in a cancelable state."""
                 return self.__can_be_canceled
 
-        def __disable_cancel(self):
+        def _disable_cancel(self):
                 """Sets_can_be_canceled to False in a way that prevents missed
                 wakeups.  This may raise CanceledException, if a
                 cancellation is pending."""
@@ -2019,13 +2574,13 @@
                 self.__cancel_lock.acquire()
                 if self.__canceling:
                         self.__cancel_lock.release()
-                        self.__img.transport.reset()
+                        self._img.transport.reset()
                         raise apx.CanceledException()
                 else:
                         self.__set_can_be_canceled(False)
                 self.__cancel_lock.release()
 
-        def __enable_cancel(self):
+        def _enable_cancel(self):
                 """Sets can_be_canceled to True while grabbing the cancel
                 locks.  The caller must still hold the activity lock while
                 calling this function."""
@@ -2047,7 +2602,7 @@
                 if status == True:
                         # Callers must hold activity lock for operations
                         # that they will make cancelable.
-                        assert self.__activity_lock._is_owned()
+                        assert self._activity_lock._is_owned()
                         # In any situation where the caller holds the activity
                         # lock and wants to set cancelable to true, a cancel
                         # should not already be in progress.  This is because
@@ -2065,28 +2620,33 @@
                 this does not necessarily return the disk to its initial state
                 since the indexes or download cache may have been changed by
                 the prepare method."""
-                self.__acquire_activity_lock()
+                self._acquire_activity_lock()
                 self.__reset_unlock()
-                self.__activity_lock.release()
+                self._activity_lock.release()
 
         def __reset_unlock(self):
                 """Private method. Provides a way to reset without taking the
                 activity lock. Should only be called by a thread which already
                 holds the activity lock."""
 
-                assert self.__activity_lock._is_owned()
+                assert self._activity_lock._is_owned()
 
                 # This needs to be done first so that find_root can use it.
                 self.__progresstracker.reset()
 
-                self.__img.cleanup_downloads()
-                self.__img.transport.shutdown()
+                # Ensure alternate sources are always cleared in an
+                # exception scenario.
+                self.__set_img_alt_sources(None)
+                self.__alt_sources = {}
+
+                self._img.cleanup_downloads()
+                self._img.transport.shutdown()
                 # Recreate the image object using the path the api
                 # object was created with instead of the current path.
-                self.__img = image.Image(self.__img_path,
+                self._img = image.Image(self._img_path,
                     progtrack=self.__progresstracker,
                     user_provided_dir=True)
-                self.__img.blocking_locks = self.__blocking_locks
+                self._img.blocking_locks = self.__blocking_locks
 
                 self.__plan_desc = None
                 self.__plan_type = None
@@ -2094,7 +2654,7 @@
                 self.__executed = False
                 self.__be_name = None
 
-                self.__cancel_cleanup_exception()
+                self._cancel_cleanup_exception()
 
         def __check_cancelation(self):
                 """Private method. Provides a callback method for internal
@@ -2102,7 +2662,7 @@
                 canceled."""
                 return self.__canceling
 
-        def __cancel_cleanup_exception(self):
+        def _cancel_cleanup_exception(self):
                 """A private method that is called from exception handlers.
                 This is not needed if the method calls reset unlock,
                 which will call this method too.  This catches the case
@@ -2118,7 +2678,7 @@
                 self.__cancel_cv.notify_all()
                 self.__cancel_lock.release()
 
-        def __cancel_done(self):
+        def _cancel_done(self):
                 """A private method that wakes any threads that have been
                 sleeping, waiting for a cancellation to finish."""
 
@@ -2161,6 +2721,7 @@
                 else:
                         self.log_operation_end(error=e)
 
+        @_LockedGenerator()
         def local_search(self, query_lst):
                 """local_search takes a list of Query objects and performs
                 each query against the installed packages of the image."""
@@ -2173,7 +2734,7 @@
                         try:
                                 query = qp.parse(q.text)
                                 query_rr = qp.parse(q.text)
-                                if query_rr.remove_root(self.__img.root):
+                                if query_rr.remove_root(self._img.root):
                                         query.add_or(query_rr)
                                 if q.return_type == \
                                     query_p.Query.RETURN_PACKAGES:
@@ -2182,21 +2743,21 @@
                                 raise apx.BooleanQueryException(e)
                         except query_p.ParseError, e:
                                 raise apx.ParseError(e)
-                        self.__img.update_index_dir()
-                        assert self.__img.index_dir
+                        self._img.update_index_dir()
+                        assert self._img.index_dir
                         try:
                                 query.set_info(num_to_return=q.num_to_return,
                                     start_point=q.start_point,
-                                    index_dir=self.__img.index_dir,
+                                    index_dir=self._img.index_dir,
                                     get_manifest_path=\
-                                        self.__img.get_manifest_path,
+                                        self._img.get_manifest_path,
                                     gen_installed_pkg_names=\
-                                        self.__img.gen_installed_pkg_names,
+                                        self._img.gen_installed_pkg_names,
                                     case_sensitive=q.case_sensitive)
                                 res = query.search(
-                                    self.__img.gen_installed_pkgs,
-                                    self.__img.get_manifest_path,
-                                    self.__img.list_excludes())
+                                    self._img.gen_installed_pkgs,
+                                    self._img.get_manifest_path,
+                                    self._img.list_excludes())
                         except search_errors.InconsistentIndexException, e:
                                 raise apx.InconsistentIndexException(e)
                         # i is being inserted to track which query the results
@@ -2252,6 +2813,7 @@
                 else:
                         raise apx.ServerReturnError(line)
 
+        @_LockedGenerator()
         def remote_search(self, query_str_and_args_lst, servers=None,
             prune_versions=True):
                 """This function takes a list of Query objects, and optionally
@@ -2270,49 +2832,12 @@
                 it is possible to get deadlocks or NRLock reentrance
                 exceptions."""
 
-                clean_exit = True
-                canceled = False
-
-                self.__acquire_activity_lock()
-                self.__enable_cancel()
-                try:
-                        for r in self._remote_search(query_str_and_args_lst,
-                            servers, prune_versions):
-                                yield r
-                except GeneratorExit:
-                        return
-                except apx.CanceledException:
-                        canceled = True
-                        raise
-                except Exception:
-                        clean_exit = False
-                        raise
-                finally:
-                        if canceled:
-                                self.__cancel_done()
-                        elif clean_exit:
-                                try:
-                                        self.__disable_cancel()
-                                except apx.CanceledException:
-                                        self.__cancel_done()
-                                        self.__activity_lock.release()
-                                        raise
-                        else:
-                                self.__cancel_cleanup_exception()
-                        self.__activity_lock.release()
-
-        def _remote_search(self, query_str_and_args_lst, servers=None,
-            prune_versions=True):
-                """This is the implementation of remote_search.  The other
-                function is a wrapper that handles locking and exception
-                handling.  This is a generator function."""
-
                 failed = []
                 invalid = []
                 unsupported = []
 
                 if not servers:
-                        servers = self.__img.gen_publishers()
+                        servers = self._img.gen_publishers()
 
                 new_qs = []
                 l = query_p.QueryLexer()
@@ -2322,7 +2847,7 @@
                         try:
                                 query = qp.parse(q.text)
                                 query_rr = qp.parse(q.text)
-                                if query_rr.remove_root(self.__img.root):
+                                if query_rr.remove_root(self._img.root):
                                         query.add_or(query_rr)
                                 if q.return_type == \
                                     query_p.Query.RETURN_PACKAGES:
@@ -2348,7 +2873,7 @@
                         if isinstance(pub, dict):
                                 origin = pub["origin"]
                                 try:
-                                        pub = self.__img.get_publisher(
+                                        pub = self._img.get_publisher(
                                             origin=origin)
                                 except apx.UnknownPublisher:
                                         pub = publisher.RepositoryURI(origin)
@@ -2358,7 +2883,7 @@
                                 descriptive_name = pub.prefix
 
                         try:
-                                res = self.__img.transport.do_search(pub,
+                                res = self._img.transport.do_search(pub,
                                     query_str_and_args_lst,
                                     ccancel=self.__check_cancelation)
                         except apx.CanceledException:
@@ -2425,10 +2950,10 @@
                 # This maps fmris to the version at which they're incorporated.
                 inc_vers = {}
                 inst_stems = {}
-                brelease = self.__img.attrs["Build-Release"]
-
-                img_cat = self.__img.get_catalog(
-                    self.__img.IMG_CATALOG_INSTALLED)
+                brelease = self._img.attrs["Build-Release"]
+
+                img_cat = self._img.get_catalog(
+                    self._img.IMG_CATALOG_INSTALLED)
                 cat_info = frozenset([img_cat.DEPENDENCY])
 
                 # The incorporation list should include all installed,
@@ -2482,16 +3007,16 @@
                 performing the incremental update which is usually used.
                 This is useful for times when the index for the client has
                 been corrupted."""
-                self.__img.update_index_dir()
+                self._img.update_index_dir()
                 self.log_operation_start("rebuild-index")
-                if not os.path.isdir(self.__img.index_dir):
-                        self.__img.mkdirs()
+                if not os.path.isdir(self._img.index_dir):
+                        self._img.mkdirs()
                 try:
-                        ind = indexer.Indexer(self.__img, self.__img.get_manifest,
-                            self.__img.get_manifest_path,
-                            self.__progresstracker, self.__img.list_excludes())
+                        ind = indexer.Indexer(self._img, self._img.get_manifest,
+                            self._img.get_manifest_path,
+                            self.__progresstracker, self._img.list_excludes())
                         ind.rebuild_index_from_scratch(
-                            self.__img.gen_installed_pkgs())
+                            self._img.gen_installed_pkgs())
                 except search_errors.ProblematicPermissionsIndexException, e:
                         error = apx.ProblematicPermissionsIndexException(e)
                         self.log_operation_end(error=error)
@@ -2499,14 +3024,25 @@
                 else:
                         self.log_operation_end()
 
-        def get_manifest(self, pfmri, all_variants=True):
+        def get_manifest(self, pfmri, all_variants=True, repos=None):
                 """Returns the Manifest object for the given package FMRI.
 
                 'all_variants' is an optional boolean value indicating whether
                 the manifest should include metadata for all variants.
+
+                'repos' is a list of URI strings or RepositoryURI objects that
+                represent the locations of additional sources of package data
+                to use when retrieving the requested manifest.
                 """
 
-                return self.__img.get_manifest(pfmri, all_variants=all_variants)
+                alt_pub = None
+                if repos:
+                        pkg_pub_map, ignored, known_cat, inst_cat = \
+                            self.__get_alt_pkg_data(repos)
+                        alt_pub = pkg_pub_map.get(pfmri.publisher, {}).get(
+                            pfmri.pkg_name, {}).get(str(pfmri.version), None)
+                return self._img.get_manifest(pfmri, all_variants=all_variants,
+                    alt_pub=alt_pub)
 
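
Given the 'repos' support above, a manifest can be retrieved for a
package that exists only in a temporary source.  A minimal sketch,
assuming 'api_inst' is an existing ImageInterface instance and using a
hypothetical archive path; the FMRI comes from get_pkg_list() with
return_fmris=True:

        repos = ["file:///tmp/example.p5p"]
        for pfmri, summ, cats, states in api_inst.get_pkg_list(
            api_inst.LIST_NEWEST, repos=repos, return_fmris=True):
                # The manifest is retrieved using the alternate
                # publisher mapped for this package's origins.
                mfst = api_inst.get_manifest(pfmri, repos=repos)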
         @staticmethod
         def validate_response(res, v):
@@ -2526,36 +3062,36 @@
                 """Add the provided publisher object to the image
                 configuration."""
                 try:
-                        self.__img.add_publisher(pub,
+                        self._img.add_publisher(pub,
                             refresh_allowed=refresh_allowed,
                             progtrack=self.__progresstracker,
                             approved_cas=approved_cas, revoked_cas=revoked_cas,
                             unset_cas=unset_cas)
                 finally:
-                        self.__img.cleanup_downloads()
+                        self._img.cleanup_downloads()
 
         def get_pub_search_order(self):
                 """Return current search order of publishers; includes
                 disabled publishers"""
-                return self.__img.cfg.get_property("property",
+                return self._img.cfg.get_property("property",
                     "publisher-search-order")
 
         def set_pub_search_after(self, being_moved_prefix, staying_put_prefix):
                 """Change the publisher search order so that being_moved is
                 searched after staying_put"""
-                self.__img.pub_search_after(being_moved_prefix,
+                self._img.pub_search_after(being_moved_prefix,
                     staying_put_prefix)
 
         def set_pub_search_before(self, being_moved_prefix, staying_put_prefix):
                 """Change the publisher search order so that being_moved is
                 searched before staying_put"""
-                self.__img.pub_search_before(being_moved_prefix,
+                self._img.pub_search_before(being_moved_prefix,
                     staying_put_prefix)
 
         def get_preferred_publisher(self):
                 """Returns the preferred publisher object for the image."""
                 return self.get_publisher(
-                    prefix=self.__img.get_preferred_publisher())
+                    prefix=self._img.get_preferred_publisher())
 
         def get_publisher(self, prefix=None, alias=None, duplicate=False):
                 """Retrieves a publisher object matching the provided prefix
@@ -2565,7 +3101,7 @@
                 a copy of the publisher object should be returned instead
                 of the original.
                 """
-                pub = self.__img.get_publisher(prefix=prefix, alias=alias)
+                pub = self._img.get_publisher(prefix=prefix, alias=alias)
                 if duplicate:
                         # Never return the original so that changes to the
                         # retrieved object are not reflected until
@@ -2573,6 +3109,7 @@
                         return copy.copy(pub)
                 return pub
 
+        @_LockedCancelable()
         def get_publisherdata(self, pub=None, repo=None):
                 """Attempts to retrieve publisher configuration information from
                 the specified publisher's repository or the provided repository.
@@ -2595,22 +3132,8 @@
                 # made in the client API for clarity.
                 pub = max(pub, repo)
 
-                self.__activity_lock.acquire()
-                try:
-                        self.__enable_cancel()
-                        data = self.__img.transport.get_publisherdata(pub,
-                            ccancel=self.__check_cancelation)
-                        self.__disable_cancel()
-                        return data
-                except apx.CanceledException:
-                        self.__cancel_done()
-                        raise
-                except:
-                        self.__cancel_cleanup_exception()
-                        raise
-                finally:
-                        self.__img.cleanup_downloads()
-                        self.__activity_lock.release()
+                return self._img.transport.get_publisherdata(pub,
+                    ccancel=self.__check_cancelation)
 
         def get_publishers(self, duplicate=False):
                 """Returns a list of the publisher objects for the current
@@ -2625,12 +3148,12 @@
                         # are not reflected until update_publisher is called.
                         pubs = [
                             copy.copy(p)
-                            for p in self.__img.get_publishers().values()
+                            for p in self._img.get_publishers().values()
                         ]
                 else:
-                        pubs = self.__img.get_publishers().values()
+                        pubs = self._img.get_publishers().values()
                 return misc.get_sorted_publishers(pubs,
-                    preferred=self.__img.get_preferred_publisher())
+                    preferred=self._img.get_preferred_publisher())
 
         def get_publisher_last_update_time(self, prefix=None, alias=None):
                 """Returns a datetime object representing the last time the
@@ -2645,37 +3168,37 @@
                         return None
 
                 dt = None
-                self.__acquire_activity_lock()
+                self._acquire_activity_lock()
                 try:
-                        self.__enable_cancel()
+                        self._enable_cancel()
                         try:
                                 dt = pub.catalog.last_modified
                         except:
                                 self.__reset_unlock()
                                 raise
                         try:
-                                self.__disable_cancel()
+                                self._disable_cancel()
                         except apx.CanceledException:
-                                self.__cancel_done()
+                                self._cancel_done()
                                 raise
                 finally:
-                        self.__activity_lock.release()
+                        self._activity_lock.release()
                 return dt
 
         def has_publisher(self, prefix=None, alias=None):
                 """Returns a boolean value indicating whether a publisher using
                 the given prefix or alias exists."""
-                return self.__img.has_publisher(prefix=prefix, alias=alias)
+                return self._img.has_publisher(prefix=prefix, alias=alias)
 
         def remove_publisher(self, prefix=None, alias=None):
                 """Removes a publisher object matching the provided prefix
                 (name) or alias."""
-                self.__img.remove_publisher(prefix=prefix, alias=alias,
+                self._img.remove_publisher(prefix=prefix, alias=alias,
                     progtrack=self.__progresstracker)
 
         def set_preferred_publisher(self, prefix=None, alias=None):
                 """Sets the preferred publisher for the image."""
-                self.__img.set_preferred_publisher(prefix=prefix, alias=alias)
+                self._img.set_preferred_publisher(prefix=prefix, alias=alias)
 
         def update_publisher(self, pub, refresh_allowed=True):
                 """Replaces an existing publisher object with the provided one
@@ -2687,25 +3210,25 @@
                 repository, mirror, or origin.  If False, no attempt will be
                 made to retrieve publisher metadata."""
 
-                self.__acquire_activity_lock()
+                self._acquire_activity_lock()
                 try:
-                        self.__disable_cancel()
-                        with self.__img.locked_op("update-publisher"):
+                        self._disable_cancel()
+                        with self._img.locked_op("update-publisher"):
                                 return self.__update_publisher(pub,
                                     refresh_allowed=refresh_allowed)
                 except apx.CanceledException, e:
-                        self.__cancel_done()
+                        self._cancel_done()
                         raise
                 finally:
-                        self.__img.cleanup_downloads()
-                        self.__activity_lock.release()
+                        self._img.cleanup_downloads()
+                        self._activity_lock.release()
 
         def __update_publisher(self, pub, refresh_allowed=True):
                 """Private publisher update method; caller responsible for
                 locking."""
 
                 if pub.disabled and \
-                    pub.prefix == self.__img.get_preferred_publisher():
+                    pub.prefix == self._img.get_preferred_publisher():
                         raise apx.SetPreferredPublisherDisabled(
                             pub.prefix)
 
@@ -2748,7 +3271,9 @@
                 refresh_catalog = False
                 disable = False
                 orig_pub = None
-                publishers = self.__img.get_publishers()
+
+                # The image configuration must be manipulated directly here;
+                # get_publishers() can now return merged or copied publisher
+                # objects (see image.py) rather than the stored configuration.
+                publishers = self._img.cfg.publishers
 
                 # First, attempt to match the updated publisher object to an
                 # existing one using the object id that was stored during
@@ -2797,9 +3322,9 @@
 
                                 # Prepare the new publisher object.
                                 pub.meta_root = \
-                                    self.__img._get_publisher_meta_root(
+                                    self._img._get_publisher_meta_root(
                                     pub.prefix)
-                                pub.transport = self.__img.transport
+                                pub.transport = self._img.transport
 
                                 # Finally, add the new publisher object.
                                 publishers[pub.prefix] = pub
@@ -2814,20 +3339,20 @@
                                         break
 
                 repo = pub.selected_repository
-                if not repo.origins:
-                        raise apx.PublisherOriginRequired(pub.prefix)
-
                 validate = origins_changed(orig_pub[-1].selected_repository,
                     pub.selected_repository)
 
                 try:
-                        if disable:
+                        if disable or (not repo.origins and
+                            orig_pub[-1].selected_repository.origins):
                                 # Remove the publisher's metadata (such as
                                 # catalogs, etc.).  This only needs to be done
-                                # in the event that a publisher is disabled; in
-                                # any other case (the origin changing, etc.),
-                                # refresh() will do the right thing.
-                                self.__img.remove_publisher_metadata(pub)
+                                # in the event that a publisher is disabled or
+                                # has transitioned from having origins to not
+                                # having any at all; in any other case (the
+                                # origins changing, etc.), refresh() will do the
+                                # right thing.
+                                self._img.remove_publisher_metadata(pub)
                         elif not pub.disabled and not refresh_catalog:
                                 refresh_catalog = pub.needs_refresh
 
@@ -2837,7 +3362,7 @@
                                 # revalidated.
 
                                 if validate:
-                                        self.__img.transport.valid_publisher_test(
+                                        self._img.transport.valid_publisher_test(
                                             pub)
 
                                 # Validate all new origins against publisher
@@ -2869,8 +3394,7 @@
                         cleanup()
                         raise
 
-                # Successful; so save configuration.
-                self.__img.save_config()
+                self._img.save_config()
 
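
With the origin check relaxed above, removing the last origin from a
publisher is now a supported transition: the publisher's cached
metadata is removed instead of an error being raised (the retired
PublisherOriginRequired exception is replaced in the api_errors.py
hunk below).  A minimal sketch, using a hypothetical publisher prefix
and the reset_origins() call that appears in the image.py hunks below:

        pub = api_inst.get_publisher(prefix="example.com",
            duplicate=True)
        pub.selected_repository.reset_origins()
        # Previously raised PublisherOriginRequired; now removes the
        # publisher's cached catalogs and saves the configuration.
        api_inst.update_publisher(pub, refresh_allowed=False)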
         def log_operation_end(self, error=None, result=None):
                 """Marks the end of an operation to be recorded in image
@@ -2882,18 +3406,18 @@
                 be based on the class of 'error' and 'error' will be recorded
                 for the current operation.  If 'result' and 'error' are not
                 provided, success is assumed."""
-                self.__img.history.log_operation_end(error=error, result=result)
+                self._img.history.log_operation_end(error=error, result=result)
 
         def log_operation_error(self, error):
                 """Adds an error to the list of errors to be recorded in image
                 history for the current operation."""
-                self.__img.history.log_operation_error(error)
+                self._img.history.log_operation_error(error)
 
         def log_operation_start(self, name):
                 """Marks the start of an operation to be recorded in image
                 history."""
-                be_name, be_uuid = bootenv.BootEnv.get_be_name(self.__img.root)
-                self.__img.history.log_operation_start(name,
+                be_name, be_uuid = bootenv.BootEnv.get_be_name(self._img.root)
+                self._img.history.log_operation_start(name,
                     be_name=be_name, be_uuid=be_uuid)
 
         def parse_p5i(self, data=None, fileobj=None, location=None):
@@ -2946,7 +3470,7 @@
                                 wildcards.
                 """
 
-                brelease = self.__img.attrs["Build-Release"]
+                brelease = self._img.attrs["Build-Release"]
                 for pat in patterns:
                         error = None
                         matcher = None
@@ -2984,17 +3508,17 @@
                 newest version.  Returns a boolean indicating whether any action
                 was taken."""
 
-                self.__acquire_activity_lock()
+                self._acquire_activity_lock()
                 try:
-                        self.__disable_cancel()
-                        self.__img.allow_ondisk_upgrade = True
-                        return self.__img.update_format(
+                        self._disable_cancel()
+                        self._img.allow_ondisk_upgrade = True
+                        return self._img.update_format(
                             progtrack=self.__progresstracker)
                 except apx.CanceledException, e:
-                        self.__cancel_done()
+                        self._cancel_done()
                         raise
                 finally:
-                        self.__activity_lock.release()
+                        self._activity_lock.release()
 
         def write_p5i(self, fileobj, pkg_names=None, pubs=None):
                 """Writes the publisher, repository, and provided package names
@@ -3021,7 +3545,7 @@
                         plist = []
                         for p in pubs:
                                 if not isinstance(p, publisher.Publisher):
-                                        plist.append(self.__img.get_publisher(
+                                        plist.append(self._img.get_publisher(
                                             prefix=p, alias=p))
                                 else:
                                         plist.append(p)
@@ -3060,7 +3584,7 @@
 
         def __init__(self, img, new_be):
                 self.__plan = img.imageplan
-                self.__img = img
+                self._img = img
                 self.__new_be = new_be
 
         def get_services(self):
@@ -3139,14 +3663,14 @@
                                 src_li = None
                                 if src:
                                         src_li = LicenseInfo(pp.origin_fmri,
-                                            src, img=self.__img)
+                                            src, img=self._img)
 
                                 dest = entry["dest"]
                                 dest_li = None
                                 if dest:
                                         dest_li = LicenseInfo(
                                             pp.destination_fmri, dest,
-                                            img=self.__img)
+                                            img=self._img)
 
                                 yield (pp.destination_fmri, src_li, dest_li,
                                     entry["accepted"], entry["displayed"])
--- a/src/modules/client/api_errors.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/client/api_errors.py	Wed Feb 09 18:43:21 2011 -0800
@@ -1436,14 +1436,14 @@
                     "publisher.") % self.data
 
 
-class PublisherOriginRequired(PublisherError):
-        """Used to indicate that the specified publisher must have at least one
-        repository with at least one origin URI."""
+class NoPublisherRepositories(PublisherError):
+        """Used to indicate that a Publisher has no repository information
+        configured and so transport operations cannot be performed."""
 
         def __str__(self):
-                return _("At least one origin is required for %s.  You must "
-                "add a new origin before attempting to remove the specified "
-                "origin(s).") % self.data
+                return _("Unable to retrieve requested package data for "
+                    "publisher %s; no repositories are currently configured "
+                    "for use with this publisher.") % self.data
 
 
 class RemovePreferredPublisher(PublisherError):
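
Consumers that previously handled PublisherOriginRequired should
handle NoPublisherRepositories instead; it is now raised when a
transport operation is attempted for a publisher with no configured
repositories, rather than when the origin list is edited.  A sketch of
the consuming side, assuming the pre-existing refresh() entry point
and a hypothetical publisher prefix:

        import pkg.client.api_errors as apx

        try:
                api_inst.refresh(pubs=["example.com"])
        except apx.NoPublisherRepositories, e:
                # The publisher exists but has no origins or mirrors,
                # so nothing can be retrieved for it.
                print e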
--- a/src/modules/client/image.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/client/image.py	Wed Feb 09 18:43:21 2011 -0800
@@ -25,6 +25,7 @@
 #
 
 import atexit
+import copy
 import datetime
 import errno
 import os
@@ -104,6 +105,12 @@
         IMG_CATALOG_KNOWN = "known"
         IMG_CATALOG_INSTALLED = "installed"
 
+        # This is a transitory state used for temporary package sources to
+        # indicate that the package entry should be removed if it does not
+        # also have PKG_STATE_INSTALLED.  This state must not be written
+        # to disk.
+        PKG_STATE_ALT_SOURCE = 99
+
         # Please note that the values of these PKG_STATE constants should not
         # be changed as it would invalidate existing catalog data stored in the
         # image.  This means that if a constant is removed, the values of the
@@ -151,6 +158,12 @@
                 else:
                         assert(imgtype is not None)
 
+                # Alternate package sources.
+                self.__alt_pkg_pub_map = None
+                self.__alt_pubs = None
+                self.__alt_known_cat = None
+                self.__alt_pkg_sources_loaded = False
+
                 # Indicates whether automatic image format upgrades of the
                 # on-disk format are allowed.
                 self.allow_ondisk_upgrade = allow_ondisk_upgrade
@@ -249,6 +262,7 @@
 
                 # This is used to cache image catalogs.
                 self.__catalogs = {}
+                self.__alt_pkg_sources_loaded = False
 
         @property
         def signature_policy(self):
@@ -1412,7 +1426,63 @@
         def gen_publishers(self, inc_disabled=False):
                 if not self.cfg:
                         raise apx.ImageCfgEmptyError(self.root)
-                for pfx, pub in self.cfg.publishers.iteritems():
+
+                alt_pubs = {}
+                if self.__alt_pkg_pub_map:
+                        alt_src_pubs = dict(
+                            (p.prefix, p)
+                            for p in self.__alt_pubs
+                        )
+
+                        for pfx in self.__alt_known_cat.publishers():
+                                # Include alternate package source publishers
+                                # in result, and temporarily enable any
+                                # disabled publishers that already exist in
+                                # the image configuration.
+                                try:
+                                        img_pub = self.cfg.publishers[pfx]
+
+                                        # A blind merge of the signing certs
+                                        # is required since which ones are
+                                        # needed isn't known in advance.
+                                        for attr in ("signing_ca_certs",
+                                            "intermediate_certs"):
+                                                ocerts = set(getattr(img_pub,
+                                                    attr))
+                                                ncerts = getattr(
+                                                    alt_src_pubs[pfx], attr)
+                                                setattr(img_pub, attr,
+                                                    list(ocerts.union(ncerts)))
+
+                                        if not img_pub.disabled:
+                                                # No override needed.
+                                                continue
+                                        new_pub = copy.copy(img_pub)
+                                        new_pub.disabled = False
+
+                                        # Discard origins and mirrors to prevent
+                                        # their accidental use.
+                                        repo = new_pub.selected_repository
+                                        repo.reset_origins()
+                                        repo.reset_mirrors()
+                                except KeyError:
+                                        new_pub = alt_src_pubs[pfx]
+                                        new_pub.meta_root = \
+                                            self._get_publisher_meta_root(pfx)
+                                        new_pub.transport = self.transport
+
+                                alt_pubs[pfx] = new_pub
+
+                publishers = [
+                    alt_pubs.get(p.prefix, p)
+                    for p in self.cfg.publishers.values()
+                ]
+                publishers.extend((
+                    p for p in alt_pubs.values()
+                    if p not in publishers
+                ))
+
+                for pub in publishers:
                         if inc_disabled or not pub.disabled:
                                 yield pub
 
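
The merge logic above yields the image's configured publishers first
(substituting a temporarily-enabled, origin-reset copy wherever an
alternate source provides the same prefix), followed by publishers
that exist only in the alternate sources.  For example, with
hypothetical prefixes:

        # Image config: "solaris", "example.com" (disabled).
        # Alternate sources provide: "example.com", "extra.org".
        for pub in img.gen_publishers():
                print pub.prefix
        # solaris      -- image configuration, unchanged
        # example.com  -- copy: enabled, origins/mirrors reset,
        #                 signing certs merged
        # extra.org    -- alternate source only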
@@ -1424,7 +1494,11 @@
                 whether or not the publisher is enabled"""
 
                 # automatically make disabled publishers not sticky
-                so = self.cfg.get_property("property", "publisher-search-order")
+                so = copy.copy(self.cfg.get_property("property",
+                    "publisher-search-order"))
+
+                pubs = list(self.gen_publishers())
+                so.extend((p.prefix for p in pubs if p.prefix not in so))
 
                 ret = dict([
                     (p.prefix, (so.index(p.prefix), p.sticky, True))
@@ -1483,16 +1557,18 @@
                         self.save_config()
 
         def get_publishers(self):
-                return self.cfg.publishers
+                return dict(
+                    (p.prefix, p)
+                    for p in self.gen_publishers(inc_disabled=True)
+                )
 
         def get_publisher(self, prefix=None, alias=None, origin=None):
-                publishers = [p for p in self.cfg.publishers.values()]
-                for pub in publishers:
+                for pub in self.gen_publishers(inc_disabled=True):
                         if prefix and prefix == pub.prefix:
                                 return pub
                         elif alias and alias == pub.alias:
                                 return pub
-                        elif origin and \
+                        elif origin and pub.selected_repository and \
                             pub.selected_repository.has_origin(origin):
                                 return pub
                 raise apx.UnknownPublisher(max(prefix, alias, origin))
@@ -1534,6 +1610,88 @@
                 """Returns the prefix of the preferred publisher."""
                 return self.cfg.get_property("property", "preferred-publisher")
 
+        def __apply_alt_pkg_sources(self, img_kcat):
+                pkg_pub_map = self.__alt_pkg_pub_map
+                if not pkg_pub_map or self.__alt_pkg_sources_loaded:
+                        # No alternate sources to merge.
+                        return
+
+                # Temporarily merge the package metadata in the alternate
+                # known package catalog for packages not listed in the
+                # image's known catalog.
+                def merge_check(alt_kcat, pfmri, new_entry):
+                        states = new_entry["metadata"]["states"]
+                        if self.PKG_STATE_INSTALLED in states:
+                                # Not interesting; already installed.
+                                return False, None
+                        img_entry = img_kcat.get_entry(pfmri=pfmri)
+                        if img_entry is not None:
+                                # Already in image known catalog.
+                                return False, None
+                        return True, new_entry
+
+                img_kcat.append(self.__alt_known_cat, cb=merge_check)
+                img_kcat.finalize()
+
+                self.__alt_pkg_sources_loaded = True
+                self.transport.cfg.pkg_pub_map = self.__alt_pkg_pub_map
+                self.transport.cfg.alt_pubs = self.__alt_pubs
+                self.transport.cfg.reset_caches()
+
+        def __cleanup_alt_pkg_certs(self):
+                """Private helper function to cleanup package certificate
+                information after use of temporary package data."""
+
+                if not self.__alt_pubs:
+                        return
+
+                # Clean up publisher cert information; any certs not retrieved
+                # during temporary publisher use need to be expunged from the
+                # image configuration.
+                for pub in self.__alt_pubs:
+                        try:
+                                ipub = self.cfg.publishers[pub.prefix]
+                        except KeyError:
+                                # Nothing to do.
+                                continue
+
+                        # Elide any certs that were not retrieved and that came
+                        # from temporary package sources.
+                        for hattr in ("signing_ca_certs", "intermediate_certs"):
+                                certs = set(getattr(ipub, hattr))
+                                tcerts = set(getattr(pub, hattr))
+                                for chash in (tcerts & certs):
+                                        cpath = os.path.join(ipub.cert_root,
+                                            chash)
+                                        if not os.path.exists(cpath):
+                                                certs.discard(chash)
+                                setattr(ipub, hattr, list(certs))
+
+        def set_alt_pkg_sources(self, alt_sources):
+                """Specifies an alternate source of package metadata to be
+                temporarily merged with image state so that it can be used
+                as part of packaging operations."""
+
+                if not alt_sources:
+                        self.__init_catalogs()
+                        self.__alt_pkg_pub_map = None
+                        self.__alt_pubs = None
+                        self.__alt_known_cat = None
+                        self.__alt_pkg_sources_loaded = False
+                        self.transport.cfg.pkg_pub_map = None
+                        self.transport.cfg.alt_pubs = None
+                        self.transport.cfg.reset_caches()
+                        return
+                elif self.__alt_pkg_sources_loaded:
+                        # Ensure existing alternate package source data
+                        # is not part of temporary image state.
+                        self.__init_catalogs()
+
+                pkg_pub_map, alt_pubs, alt_kcat, ignored = alt_sources
+                self.__alt_pkg_pub_map = pkg_pub_map
+                self.__alt_pubs = alt_pubs
+                self.__alt_known_cat = alt_kcat
+
         def set_preferred_publisher(self, prefix=None, alias=None, pub=None):
                 """Sets the preferred publisher for packaging operations.
 
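merge_check() above is the heart of __apply_alt_pkg_sources(): an entry
from the alternate known catalog is merged only when the package is
neither installed nor already present in the image's known catalog.  The
same decision, sketched over plain dictionaries (the real code operates
on pkg.catalog objects and PKG_STATE_* constants):

        INSTALLED = "installed"         # stand-in for PKG_STATE_INSTALLED

        def merge_check(img_known, pfmri, new_entry):
                """Keep an alternate-source entry only if it is
                neither installed nor already known."""
                if INSTALLED in new_entry["metadata"]["states"]:
                        return False, None
                if pfmri in img_known:
                        return False, None
                return True, new_entry

        img = {"pkg://test/foo@1.0": {"metadata": {"states": []}}}
        alt = {
            "pkg://test/foo@1.0": {"metadata": {"states": []}},
            "pkg://test/bar@1.0": {"metadata": {"states": []}},
        }
        for pfmri, entry in alt.items():
                keep, merged = merge_check(img, pfmri, entry)
                if keep:
                        img[pfmri] = merged
        assert sorted(img) == ["pkg://test/bar@1.0",
            "pkg://test/foo@1.0"]
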
@@ -1635,82 +1793,88 @@
 
                 'progtrack' is an optional ProgressTracker object."""
 
-                # API consumer error.
-                repo = pub.selected_repository
-                assert repo and repo.origins
+                with self.locked_op("add-publisher"):
+                        return self.__add_publisher(pub,
+                            refresh_allowed=refresh_allowed,
+                            progtrack=progtrack, approved_cas=EmptyI,
+                            revoked_cas=EmptyI, unset_cas=EmptyI)
+
+        def __add_publisher(self, pub, refresh_allowed=True, progtrack=None,
+            approved_cas=EmptyI, revoked_cas=EmptyI, unset_cas=EmptyI):
+                """Private version of add_publisher(); caller is responsible
+                for locking."""
 
                 if self.version < self.CURRENT_VERSION:
                         raise apx.ImageFormatUpdateNeeded(self.root)
 
-                with self.locked_op("add-publisher"):
-                        for p in self.cfg.publishers.values():
-                                if pub.prefix == p.prefix or \
-                                    pub.prefix == p.alias or \
-                                    pub.alias and (pub.alias == p.alias or
-                                    pub.alias == p.prefix):
-                                        raise apx.DuplicatePublisher(pub)
-
-                        if not progtrack:
-                                progtrack = progress.QuietProgressTracker()
-
-                        # Must assign this first before performing operations.
-                        pub.meta_root = self._get_publisher_meta_root(
-                            pub.prefix)
-                        pub.transport = self.transport
-                        self.cfg.publishers[pub.prefix] = pub
-
-                        # Ensure that if the publisher's meta directory already
-                        # exists for some reason that the data within is not
-                        # used.
-                        self.remove_publisher_metadata(pub, progtrack=progtrack,
-                            rebuild=False)
-
-                        if refresh_allowed:
-                                try:
-                                        # First, verify that the publisher has a
-                                        # valid pkg(5) repository.
-                                        self.transport.valid_publisher_test(pub)
-                                        pub.validate_config()
-                                        self.refresh_publishers(pubs=[pub],
-                                            progtrack=progtrack)
-                                        # Check that all CA certs claimed by
-                                        # this publisher validate against the
-                                        # trust anchors for this image.
-                                        self.signature_policy.check_cas(pub,
-                                            self.trust_anchors)
-                                except Exception, e:
-                                        # Remove the newly added publisher since
-                                        # it is invalid or the retrieval failed.
-                                        self.cfg.remove_publisher(pub.prefix)
-                                        raise
-                                except:
-                                        # Remove the newly added publisher since
-                                        # the retrieval failed.
-                                        self.cfg.remove_publisher(pub.prefix)
-                                        raise
-
-                        for ca in approved_cas:
-                                try:
-                                        ca = os.path.abspath(ca)
-                                        fh = open(ca, "rb")
-                                        s = fh.read()
-                                        fh.close()
-                                except EnvironmentError, e:
-                                        if e.errno == errno.ENOENT:
-                                                raise apx.MissingFileArgumentException(
-                                                    ca)
-                                        raise apx._convert_error(e)
-                                pub.approve_ca_cert(s, manual=True)
-
-                        for hsh in revoked_cas:
-                                pub.revoke_ca_cert(hsh)
-
-                        for hsh in unset_cas:
-                                pub.unset_ca_cert(hsh)
-
-
-                        # Only after success should the configuration be saved.
-                        self.save_config()
+                for p in self.cfg.publishers.values():
+                        if pub.prefix == p.prefix or \
+                            pub.prefix == p.alias or \
+                            pub.alias and (pub.alias == p.alias or
+                            pub.alias == p.prefix):
+                                raise apx.DuplicatePublisher(pub)
+
+                if not progtrack:
+                        progtrack = progress.QuietProgressTracker()
+
+                # Must assign this first before performing operations.
+                pub.meta_root = self._get_publisher_meta_root(
+                    pub.prefix)
+                pub.transport = self.transport
+                self.cfg.publishers[pub.prefix] = pub
+
+                # Ensure that if the publisher's meta directory already
+                # exists for some reason that the data within is not
+                # used.
+                self.remove_publisher_metadata(pub, progtrack=progtrack,
+                    rebuild=False)
+
+                repo = pub.selected_repository
+                if refresh_allowed and repo.origins:
+                        try:
+                                # First, verify that the publisher has a
+                                # valid pkg(5) repository.
+                                self.transport.valid_publisher_test(pub)
+                                pub.validate_config()
+                                self.refresh_publishers(pubs=[pub],
+                                    progtrack=progtrack)
+                                # Check that all CA certs claimed by
+                                # this publisher validate against the
+                                # trust anchors for this image.
+                                self.signature_policy.check_cas(pub,
+                                    self.trust_anchors)
+                        except Exception, e:
+                                # Remove the newly added publisher since
+                                # it is invalid or the retrieval failed.
+                                self.cfg.remove_publisher(pub.prefix)
+                                raise
+                        except:
+                                # Remove the newly added publisher since
+                                # the retrieval failed.
+                                self.cfg.remove_publisher(pub.prefix)
+                                raise
+
+                for ca in approved_cas:
+                        try:
+                                ca = os.path.abspath(ca)
+                                fh = open(ca, "rb")
+                                s = fh.read()
+                                fh.close()
+                        except EnvironmentError, e:
+                                if e.errno == errno.ENOENT:
+                                        raise apx.MissingFileArgumentException(
+                                            ca)
+                                raise apx._convert_error(e)
+                        pub.approve_ca_cert(s, manual=True)
+
+                for hsh in revoked_cas:
+                        pub.revoke_ca_cert(hsh)
+
+                for hsh in unset_cas:
+                        pub.unset_ca_cert(hsh)
+
+                # Only after success should the configuration be saved.
+                self.save_config()
 
         def verify(self, fmri, progresstracker, **args):
                 """Generator that returns a tuple of the form (action, errors,
@@ -1970,7 +2134,8 @@
                 return os.path.join(self.get_manifest_dir(pfmri),
                     "manifest")
 
-        def __get_manifest(self, fmri, excludes=EmptyI, intent=None):
+        def __get_manifest(self, fmri, excludes=EmptyI, intent=None,
+            alt_pub=None):
                 """Find on-disk manifest and create in-memory Manifest
                 object.... grab from server if needed"""
 
@@ -1983,9 +2148,12 @@
                         # if we have a intent string, let depot
                         # know for what we're using the cached manifest
                         if intent:
+                                alt_repo = None
+                                if alt_pub:
+                                        alt_repo = alt_pub.selected_repository
                                 try:
                                         self.transport.touch_manifest(fmri,
-                                            intent)
+                                            intent, alt_repo=alt_repo)
                                 except (apx.UnknownPublisher,
                                     apx.TransportError):
                                         # It's not fatal if we can't find
@@ -1993,10 +2161,11 @@
                                         pass
                 except KeyError:
                         ret = self.transport.get_manifest(fmri, excludes,
-                            intent)
+                            intent, pub=alt_pub)
                 return ret
 
-        def get_manifest(self, fmri, all_variants=False, intent=None):
+        def get_manifest(self, fmri, all_variants=False, intent=None,
+            alt_pub=None):
                 """return manifest; uses cached version if available.
                 all_variants controls whether manifest contains actions
                 for all variants"""
@@ -2010,7 +2179,7 @@
 
                 try:
                         m = self.__get_manifest(fmri, excludes=excludes,
-                            intent=intent)
+                            intent=intent, alt_pub=alt_pub)
                 except apx.ActionExecutionError, e:
                         raise
                 except pkg.actions.ActionError, e:
@@ -2057,6 +2226,13 @@
 
                         if pfmri in added:
                                 states.add(self.PKG_STATE_INSTALLED)
+                                if self.PKG_STATE_ALT_SOURCE in states:
+                                        states.discard(
+                                            self.PKG_STATE_UPGRADABLE)
+                                        states.discard(
+                                            self.PKG_STATE_ALT_SOURCE)
+                                        states.discard(
+                                            self.PKG_STATE_KNOWN)
                         elif self.PKG_STATE_KNOWN not in states:
                                 # This entry is no longer available and has no
                                 # meaningful state information, so should be
@@ -2088,6 +2264,50 @@
                         progtrack.item_add_progress()
                 progtrack.item_done()
 
+                # Discard entries for alternate source packages that weren't
+                # installed as part of the operation.
+                if self.__alt_pkg_pub_map:
+                        for pfmri in self.__alt_known_cat.fmris():
+                                if pfmri in added:
+                                        # Nothing to do.
+                                        continue
+
+                                entry = kcat.get_entry(pfmri)
+                                if not entry:
+                                        # The only reason that the entry should
+                                        # not exist in the 'known' part is
+                                        # because it was removed during the
+                                        # operation.
+                                        assert pfmri in removed
+                                        continue
+
+                                states = entry.get("metadata", {}).get("states",
+                                    EmptyI)
+                                if self.PKG_STATE_ALT_SOURCE in states:
+                                        kcat.remove_package(pfmri)
+
+                        # Now add the publishers of packages installed from
+                        # temporary sources that do not already exist in the
+                        # image's configuration.  (They are added without any
+                        # origins, and are marked sticky and enabled.)
+                        cfgpubs = set(self.cfg.publishers.keys())
+                        instpubs = set(f.publisher for f in added)
+                        altpubs = self.__alt_known_cat.publishers()
+
+                        # List of publishers that need to be added is the
+                        # intersection of installed and alternate minus
+                        # the already configured.
+                        newpubs = (instpubs & altpubs) - cfgpubs
+                        for pfx in newpubs:
+                                npub = publisher.Publisher(pfx,
+                                    repositories=[publisher.Repository()])
+                                self.__add_publisher(npub,
+                                    refresh_allowed=False)
+
+                        # Ensure image configuration reflects new information.
+                        self.__cleanup_alt_pkg_certs()
+                        self.save_config()
+
                 # Remove manifests of packages that were removed from the
                 # system.  Some packages may have only had facets or
                 # variants changed, so don't remove those.
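
The publisher bookkeeping above reduces to set arithmetic: the publishers
that must be added to the image configuration are those that packages
were installed from and that came from the alternate sources, minus any
that are already configured.  Concretely:

        cfgpubs = set(["solaris"])              # already configured
        instpubs = set(["solaris", "extra"])    # publishers of added pkgs
        altpubs = set(["extra", "other"])       # temporary-source pubs

        newpubs = (instpubs & altpubs) - cfgpubs
        assert newpubs == set(["extra"])
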
@@ -2170,13 +2390,22 @@
                 if not self.imgdir:
                         raise RuntimeError("self.imgdir must be set")
 
+                cat = None
                 try:
-                        return self.__catalogs[name]
+                        cat = self.__catalogs[name]
                 except KeyError:
                         pass
 
-                cat = self.__get_catalog(name)
-                self.__catalogs[name] = cat
+                if not cat:
+                        cat = self.__get_catalog(name)
+                        self.__catalogs[name] = cat
+
+                if name == self.IMG_CATALOG_KNOWN:
+                        # Apply alternate package source data every time that
+                        # the known catalog is requested.
+                        self.__apply_alt_pkg_sources(cat)
+
                 return cat
 
         def _manifest_cb(self, cat, f):
@@ -3262,21 +3491,24 @@
                 self.__init_catalogs()
 
                 try:
-                        pfunc = getattr(ip, "plan_%s" % plan_name)
-                        pfunc(*args)
-                except apx.ActionExecutionError, e:
-                        raise
-                except pkg.actions.ActionError, e:
-                        raise apx.InvalidPackageErrors([e])
-                except apx.ApiException:
-                        raise
-
-                try:
-                        self.__call_imageplan_evaluate(ip)
-                except apx.ActionExecutionError, e:
-                        raise
-                except pkg.actions.ActionError, e:
-                        raise apx.InvalidPackageErrors([e])
+                        try:
+                                pfunc = getattr(ip, "plan_%s" % plan_name)
+                                pfunc(*args)
+                        except apx.ActionExecutionError, e:
+                                raise
+                        except pkg.actions.ActionError, e:
+                                raise apx.InvalidPackageErrors([e])
+                        except apx.ApiException:
+                                raise
+
+                        try:
+                                self.__call_imageplan_evaluate(ip)
+                        except apx.ActionExecutionError, e:
+                                raise
+                        except pkg.actions.ActionError, e:
+                                raise apx.InvalidPackageErrors([e])
+                finally:
+                        self.__cleanup_alt_pkg_certs()
 
         def make_install_plan(self, pkg_list, progtrack, check_cancelation,
             noexecute, reject_list=EmptyI):
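
The plan-building hunk above nests the existing exception translation
inside a try/finally so that __cleanup_alt_pkg_certs() runs whether
planning succeeds or raises.  The control flow, reduced to a
self-contained sketch (Python 2 syntax to match the codebase; the names
are illustrative):

        class ActionError(Exception):
                pass

        class InvalidPackageErrors(Exception):
                pass

        def plan_op(plan, cleanup):
                """Translate low-level action errors into API errors
                while guaranteeing that cleanup always runs."""
                try:
                        try:
                                plan()
                        except ActionError, e:
                                raise InvalidPackageErrors([e])
                finally:
                        cleanup()

        ran = []
        def bad_plan():
                raise ActionError("boom")

        try:
                plan_op(bad_plan, lambda: ran.append("cleanup"))
        except InvalidPackageErrors:
                pass
        assert ran == ["cleanup"]
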
--- a/src/modules/client/imageconfig.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/client/imageconfig.py	Wed Feb 09 18:43:21 2011 -0800
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2010 Oracle and/or its affiliates.  All rights reserved.
+# Copyright (c) 2007, 2011 Oracle and/or its affiliates.  All rights reserved.
 #
 
 import errno
@@ -499,31 +499,32 @@
                                 self.set_property(section, prop,
                                     getattr(pub, prop))
 
-                        # For now, write out "origin" for compatibility with
-                        # older clients in addition to "origins".  Older
-                        # clients may drop the "origins" when rewriting the
-                        # configuration, but that doesn't really break
-                        # anything.
-                        repo = pub.selected_repository
-                        self.set_property(section, "origin",
-                            repo.origins[0].uri)
-
                         #
                         # For zones, where the reachability of an absolute path
                         # changes depending on whether you're in the zone or
                         # not.  So we have a different policy: ssl_key and
                         # ssl_cert are treated as zone root relative.
                         #
+                        repo = pub.selected_repository
                         ngz = self.variants.get("variant.opensolaris.zone",
                             "global") == "nonglobal"
-                        p = str(pub["ssl_key"])
+
+                        p = ""
+                        for o in repo.origins:
+                                if o.ssl_key:
+                                        p = str(o.ssl_key)
+                                        break
                         if ngz and self.__imgroot != os.sep and p != "None":
                                 # Trim the imageroot from the path.
                                 if p.startswith(self.__imgroot):
                                         p = p[len(self.__imgroot):]
                         self.set_property(section, "ssl_key", p)
 
-                        p = str(pub["ssl_cert"])
+                        p = ""
+                        for o in repo.origins:
+                                if o.ssl_cert:
+                                        p = str(o.ssl_cert)
+                                        break
                         if ngz and self.__imgroot != os.sep and p != "None":
                                 if p.startswith(self.__imgroot):
                                         p = p[len(self.__imgroot):]
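
With ssl_key and ssl_cert now stored per-origin rather than on the
publisher, imageconfig.py writes out whichever value the first origin
defines.  A sketch of that scan (Origin here is a stand-in class, not
the pkg API):

        class Origin(object):
                def __init__(self, uri, ssl_key=None):
                        self.uri = uri
                        self.ssl_key = ssl_key

        def first_ssl_key(origins):
                """Return the ssl_key of the first origin that has
                one, or the empty string when none is configured."""
                for o in origins:
                        if o.ssl_key:
                                return str(o.ssl_key)
                return ""

        origins = [Origin("http://a.example.org"),
            Origin("https://b.example.org", ssl_key="/path/key.pem")]
        assert first_ssl_key(origins) == "/path/key.pem"
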
--- a/src/modules/client/imageplan.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/client/imageplan.py	Wed Feb 09 18:43:21 2011 -0800
@@ -1244,7 +1244,6 @@
                 self.__references = None
 
                 self.image.transport.prefetch_manifests(prefetch_mfsts,
-                    progtrack=self.__progtrack,
                     ccancel=self.__check_cancelation)
 
                 # No longer needed.
--- a/src/modules/client/progress.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/client/progress.py	Wed Feb 09 18:43:21 2011 -0800
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 import errno
@@ -98,7 +98,9 @@
                 self.ind_phase_last = "None"
 
                 self.item_cur_nitems = 0
+                self.item_cur_nbytes = 0
                 self.item_goal_nitems = 0
+                self.item_goal_nbytes = 0
                 self.item_phase = "None"
                 self.item_phase_last = "None"
 
@@ -181,6 +183,34 @@
                 self.ver_cur_fmri = None
                 self.ver_output_done()
 
+        def archive_set_goal(self, arcname, nitems, nbytes):
+                self.item_phase = arcname
+                self.item_goal_nitems = nitems
+                self.item_goal_nbytes = nbytes
+
+        def archive_add_progress(self, nitems, nbytes):
+                self.item_cur_nitems += nitems
+                self.item_cur_nbytes += nbytes
+                if self.item_goal_nitems > 0:
+                        self.archive_output()
+
+        def archive_done(self):
+                """ Call when all archiving is finished """
+                if self.item_goal_nitems != 0:
+                        self.archive_output_done()
+
+                if self.item_cur_nitems != self.item_goal_nitems:
+                        logger.error("\nExpected %s files, archived %s files "
+                            "instead." % (self.item_goal_nitems,
+                            self.item_cur_nitems))
+                if self.item_cur_nbytes != self.item_goal_nbytes:
+                        logger.error("\nExpected %s bytes, archived %s bytes "
+                            "instead." % (self.item_goal_nbytes,
+                            self.item_cur_nbytes))
+
+                assert self.item_cur_nitems == self.item_goal_nitems
+                assert self.item_cur_nbytes == self.item_goal_nbytes
+
         def download_set_goal(self, npkgs, nfiles, nbytes):
                 self.dl_goal_npkgs = npkgs
                 self.dl_goal_nfiles = nfiles
@@ -292,12 +322,12 @@
 
         def republish_start_pkg(self, pkgname):
                 self.cur_pkg = pkgname
-                if self.dl_goal_nbytes != 0:
+                if self.item_goal_nitems != 0:
                         self.republish_output()
 
         def republish_end_pkg(self):
                 self.item_cur_nitems += 1
-                if self.dl_goal_nbytes != 0:
+                if self.item_goal_nitems != 0:
                         self.republish_output()
 
         def upload_add_progress(self, nbytes):
@@ -309,7 +339,7 @@
 
         def republish_done(self):
                 """ Call when all downloading is finished """
-                if self.dl_goal_nbytes != 0:
+                if self.item_goal_nitems != 0:
                         self.republish_output_done()
 
         #
@@ -382,6 +412,14 @@
                 raise NotImplementedError("ver_output_done() not implemented "
                     "in superclass")
 
+        def archive_output(self):
+                raise NotImplementedError("archive_output() not implemented in "
+                    "superclass")
+
+        def archive_output_done(self):
+                raise NotImplementedError("archive_output_done() not "
+                    "implemented in superclass")
+
         def dl_output(self):
                 raise NotImplementedError("dl_output() not implemented in "
                     "superclass")
@@ -486,6 +524,12 @@
         def ver_output_info(self, actname, info):
                 return
 
+        def archive_output(self):
+                return
+
+        def archive_output_done(self):
+                return
+
         def dl_output(self):
                 return
 
@@ -603,7 +647,7 @@
                 self.__generic_pkg_output(_("Download: %s ... "))
 
         def republish_output(self):
-                self.__generic_pkg_output(_("Republish : %s ... "))
+                self.__generic_pkg_output(_("Republish: %s ... "))
 
         def __generic_done(self):
                 try:
@@ -634,6 +678,13 @@
                                 raise
                         setattr(self, last_phase_attr, pattr)
 
+        def archive_output(self, force=False):
+                self.__generic_output("item_phase", "item_phase_last",
+                    force=force)
+
+        def archive_output_done(self):
+                self.__generic_done()
+
         def act_output(self, force=False):
                 self.__generic_output("act_phase", "act_phase_last",
                     force=force)
@@ -881,6 +932,43 @@
                                 raise PipeError, e
                         raise
 
+        def archive_output(self, force=False):
+                if self.item_started and not force and \
+                    (time.time() - self.last_print_time) < self.TERM_DELAY:
+                        return
+
+                self.last_print_time = time.time()
+                try:
+                        # The first time, emit header.
+                        if not self.item_started:
+                                self.item_started = True
+                                if self.last_print_time:
+                                        print
+                                print "%-45s %11s %12s" % (_("ARCHIVE"),
+                                    _("FILES"), _("STORE (MB)"))
+                        else:
+                                print self.cr,
+
+                        s = "%-45.45s %11s %12s" % \
+                            (self.item_phase,
+                                "%d/%d" % \
+                                (self.item_cur_nitems,
+                                self.item_goal_nitems),
+                            "%.1f/%.1f" % \
+                                ((self.item_cur_nbytes / 1024.0 / 1024.0),
+                                (self.item_goal_nbytes / 1024.0 / 1024.0)))
+                        sys.stdout.write(s + self.clear_eol)
+                        self.needs_cr = True
+                        sys.stdout.flush()
+                except IOError, e:
+                        if e.errno == errno.EPIPE:
+                                raise PipeError, e
+                        raise
+
+        def archive_output_done(self):
+                self.archive_output(force=True)
+                self.__generic_simple_done()
+
         def dl_output(self, force=False):
                 if self.dl_started and not force and \
                     (time.time() - self.last_print_time) < self.TERM_DELAY:
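
The archive_* additions give ProgressTracker the same goal/progress/done
protocol the download methods already use: the caller declares a goal,
reports increments as files are stored, and closes out, at which point
the totals are checked against the goal.  A usage sketch against a
minimal stand-in class (not the real tracker):

        class ArchiveTracker(object):
                def archive_set_goal(self, arcname, nitems, nbytes):
                        self.arcname = arcname
                        self.goal_nitems = nitems
                        self.goal_nbytes = nbytes
                        self.cur_nitems = self.cur_nbytes = 0

                def archive_add_progress(self, nitems, nbytes):
                        self.cur_nitems += nitems
                        self.cur_nbytes += nbytes

                def archive_done(self):
                        # Mirrors the goal checks the real class makes.
                        assert self.cur_nitems == self.goal_nitems
                        assert self.cur_nbytes == self.goal_nbytes

        t = ArchiveTracker()
        t.archive_set_goal("example.p5p", 2, 10)
        t.archive_add_progress(1, 4)    # first file stored
        t.archive_add_progress(1, 6)    # second file stored
        t.archive_done()                # goals met; no assertion fires
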
--- a/src/modules/client/publisher.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/client/publisher.py	Wed Feb 09 18:43:21 2011 -0800
@@ -762,12 +762,19 @@
         # from during __copy__.
         _source_object_id = None
 
-        def __init__(self, prefix, alias=None, client_uuid=None, disabled=False,
-            meta_root=None, repositories=None, selected_repository=None,
-            transport=None, sticky=True, ca_certs=EmptyI,
-            intermediate_certs=EmptyI, props=None, revoked_ca_certs=EmptyI,
-            approved_ca_certs=EmptyI):
-                """Initialize a new publisher object."""
+        def __init__(self, prefix, alias=None, catalog=None, client_uuid=None,
+            disabled=False, meta_root=None, repositories=None,
+            selected_repository=None, transport=None, sticky=True,
+            ca_certs=EmptyI, intermediate_certs=EmptyI, props=None,
+            revoked_ca_certs=EmptyI, approved_ca_certs=EmptyI):
+                """Initialize a new publisher object.
+
+                'catalog' is an optional Catalog object to use in place of
+                retrieving one from the publisher's meta_root.  This option
+                may only be used when meta_root is not provided.
+                """
+
+                assert not (catalog and meta_root)
 
                 if client_uuid is None:
                         self.reset_client_uuid()
@@ -834,6 +841,9 @@
                 self.__verified_cas = False
                 self.__bad_ca_certs = set()
 
+                # Must be done last.
+                self.__catalog = catalog
+
         def __cmp__(self, other):
                 if other is None:
                         return 1
@@ -1133,6 +1143,8 @@
                 selected repository, or None if available."""
 
                 if not self.meta_root:
+                        if self.__catalog:
+                                return self.__catalog
                         return None
 
                 if not self.__catalog:
@@ -1498,6 +1510,10 @@
                                 # No refresh needed.
                                 return False
 
+                if not self.selected_repository.origins:
+                        # Nothing to do.
+                        return False
+
                 # Create temporary directory for assembly of catalog pieces.
                 try:
                         tempdir = tempfile.mkdtemp(dir=self.catalog_root)
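
A Publisher can now be constructed around an in-memory Catalog, but only
when no meta_root is given, and the catalog property falls back to that
object when there is no on-disk state.  The rule, sketched with a
stand-in class:

        class Pub(object):
                """Sketch of the catalog/meta_root rules; not the real
                pkg.client.publisher.Publisher."""

                def __init__(self, prefix, catalog=None, meta_root=None):
                        # In-memory catalog and on-disk meta_root are
                        # mutually exclusive.
                        assert not (catalog and meta_root)
                        self.prefix = prefix
                        self.meta_root = meta_root
                        self.__catalog = catalog

                @property
                def catalog(self):
                        if not self.meta_root:
                                # No on-disk state; use the object
                                # supplied at construction (or None).
                                return self.__catalog
                        return None     # loading from meta_root elided

        cat = object()
        p = Pub("test", catalog=cat)
        assert p.catalog is cat
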
--- a/src/modules/client/transport/repo.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/client/transport/repo.py	Wed Feb 09 18:43:21 2011 -0800
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 import cStringIO
@@ -37,9 +37,9 @@
 import pkg
 import pkg.p5i as p5i
 import pkg.client.api_errors as apx
-import pkg.client.publisher as publisher
 import pkg.client.transport.exception as tx
 import pkg.config as cfg
+import pkg.p5p
 import pkg.server.repository as svr_repo
 import pkg.server.query_parser as sqp
 
@@ -838,16 +838,6 @@
                 return self.__start_trans(baseurl, header, client_release,
                     pkg_name)
 
-        def publish_refresh_index(self, header=None):
-                """If the Repo points to a Repository that has a refresh-able
-                index, refresh the index."""
-
-                requesturl = self.__get_request_url("admin/0", query={
-                    "cmd": "rebuild" }, pub=pub)
-                fobj = self._fetch_url(requesturl, header=header,
-                    failonerror=False)
-                self.__check_response_body(fobj)
-
         def publish_rebuild(self, header=None, pub=None):
                 """Attempt to rebuild the package data and search data in the
                 repository."""
@@ -985,7 +975,10 @@
                     data_fp=data_fp, failonerror=failonerror)
 
 
-class FileRepo(TransportRepo):
+class _FilesystemRepo(TransportRepo):
+        """Private implementation of transport repository logic for filesystem
+        repositories.
+        """
 
         def __init__(self, repostats, repouri, engine, frepo=None):
                 """Create a file repo.  Repostats is a RepoStats object.
@@ -1648,6 +1641,314 @@
 
                 return True
 
+class _ArchiveRepo(TransportRepo):
+        """Private implementation of transport repository logic for repositories
+        contained within an archive.
+        """
+
+        def __init__(self, repostats, repouri, engine):
+                """Create a file repo.  Repostats is a RepoStats object.
+                Repouri is a RepositoryURI object.  Engine is a transport
+                engine object.
+
+                The convenience function new_repo() can be used to create
+                the correct repo."""
+
+                self._arc = None
+                self._url = repostats.url
+                self._repouri = repouri
+                self._engine = engine
+                self._verdata = None
+                self.__stats = repostats
+
+                try:
+                        scheme, netloc, path, params, query, fragment = \
+                            urlparse.urlparse(self._repouri.uri, "file",
+                            allow_fragments=0)
+                        # Path must be rstripped of separators to be used as
+                        # a file.
+                        path = urllib.url2pathname(path.rstrip(os.path.sep))
+                        self._arc = pkg.p5p.Archive(path, mode="r")
+                except pkg.p5p.InvalidArchive, e:
+                        ex = tx.TransportProtoError("file", errno.EINVAL,
+                            reason=str(e), repourl=self._url)
+                        self.__record_proto_error(ex)
+                        raise ex
+                except Exception, e:
+                        ex = tx.TransportProtoError("file", errno.EPROTO,
+                            reason=str(e), repourl=self._url)
+                        self.__record_proto_error(ex)
+                        raise ex
+
+        def __record_proto_error(self, ex):
+                """Private helper function that records a protocol error that
+                was raised by the class instead of the transport engine.  It
+                records both that a transaction was initiated and that an
+                error occurred."""
+
+                self.__stats.record_tx()
+                self.__stats.record_error(decayable=ex.decayable)
+
+        def add_version_data(self, verdict):
+                """Cache the information about what versions a repository
+                supports."""
+
+                self._verdata = verdict
+
+        def get_catalog1(self, filelist, destloc, header=None, ts=None,
+            progtrack=None, pub=None, revalidate=False, redownload=False):
+                """Get the files that make up the catalog components
+                that are listed in 'filelist'.  Download the files to
+                the directory specified in 'destloc'.  The caller
+                may optionally specify a dictionary with header
+                elements in 'header'.  If a conditional get is
+                to be performed, 'ts' should contain a floating point
+                value of seconds since the epoch.  This protocol
+                doesn't implement revalidate and redownload.  The options
+                are ignored."""
+
+                pub_prefix = getattr(pub, "prefix", None)
+                errors = []
+                for f in filelist:
+                        try:
+                                self._arc.extract_catalog1(f, destloc,
+                                   pub=pub_prefix)
+                                if progtrack:
+                                        fs = os.stat(os.path.join(destloc, f))
+                                        progtrack.download_add_progress(1,
+                                            fs.st_size)
+                        except pkg.p5p.UnknownArchiveFiles, e:
+                                ex = tx.TransportProtoError("file",
+                                    errno.ENOENT, reason=str(e),
+                                    repourl=self._url, request=f)
+                                self.__record_proto_error(ex)
+                                errors.append(ex)
+                                continue
+                        except Exception, e:
+                                ex = tx.TransportProtoError("file",
+                                    errno.EPROTO, reason=str(e),
+                                    repourl=self._url, request=f)
+                                self.__record_proto_error(ex)
+                                errors.append(ex)
+                                continue
+                return errors
+
+        def get_datastream(self, fhash, version, header=None, ccancel=None,
+            pub=None):
+                """Get a datastream from a repo.  The name of the file is given
+                in fhash."""
+
+                pub_prefix = getattr(pub, "prefix", None)
+                try:
+                        return self._arc.get_package_file(fhash,
+                            pub=pub_prefix)
+                except pkg.p5p.UnknownArchiveFiles, e:
+                        ex = tx.TransportProtoError("file", errno.ENOENT,
+                            reason=str(e), repourl=self._url, request=fhash)
+                        self.__record_proto_error(ex)
+                        raise ex
+                except Exception, e:
+                        ex = tx.TransportProtoError("file", errno.EPROTO,
+                            reason=str(e), repourl=self._url, request=fhash)
+                        self.__record_proto_error(ex)
+                        raise ex
+
+        def get_publisherinfo(self, header=None, ccancel=None):
+                """Get publisher information from the repository."""
+
+                try:
+                        pubs = self._arc.get_publishers()
+                        buf = cStringIO.StringIO()
+                        p5i.write(buf, pubs)
+                except Exception, e:
+                        reason = "Unable to retrieve publisher configuration " \
+                            "data:\n%s" % e
+                        ex = tx.TransportProtoError("file", errno.EPROTO,
+                            reason=reason, repourl=self._url)
+                        self.__record_proto_error(ex)
+                        raise ex
+                buf.seek(0)
+                return buf
+
+        def get_manifest(self, fmri, header=None, ccancel=None, pub=None):
+                """Get a manifest from repo.  The fmri of the package for the
+                manifest is given in fmri."""
+
+                try:
+                        return self._arc.get_package_manifest(fmri, raw=True)
+                except pkg.p5p.UnknownPackageManifest, e:
+                        ex = tx.TransportProtoError("file", errno.ENOENT,
+                            reason=str(e), repourl=self._url, request=fmri)
+                        self.__record_proto_error(ex)
+                        raise ex
+                except Exception, e:
+                        ex = tx.TransportProtoError("file", errno.EPROTO,
+                            reason=str(e), repourl=self._url, request=fmri)
+                        self.__record_proto_error(ex)
+                        raise ex
+
+        def get_manifests(self, mfstlist, dest, progtrack=None, pub=None):
+                """Get manifests named in list.  The mfstlist argument contains
+                tuples (fmri, header).  This is so that each manifest may have
+                unique header information.  The destination directory is
+                specified in the dest argument."""
+
+                errors = []
+                for fmri, h in mfstlist:
+                        try:
+                                self._arc.extract_package_manifest(fmri, dest,
+                                   filename=fmri.get_url_path())
+                                if progtrack:
+                                        fs = os.stat(os.path.join(dest,
+                                            fmri.get_url_path()))
+                                        progtrack.download_add_progress(1,
+                                            fs.st_size)
+                        except pkg.p5p.UnknownPackageManifest, e:
+                                ex = tx.TransportProtoError("file",
+                                    errno.ENOENT, reason=str(e),
+                                    repourl=self._url, request=fmri)
+                                self.__record_proto_error(ex)
+                                errors.append(ex)
+                                continue
+                        except Exception, e:
+                                ex = tx.TransportProtoError("file",
+                                    errno.EPROTO, reason=str(e),
+                                    repourl=self._url, request=fmri)
+                                self.__record_proto_error(ex)
+                                errors.append(ex)
+                                continue
+                return errors
+
+        def get_files(self, filelist, dest, progtrack, version, header=None,
+            pub=None):
+                """Get multiple files from the repo at once.
+                The files are named by hash and supplied in filelist.
+                If dest is specified, download to the destination
+                directory that is given.  If progtrack is not None,
+                it contains a ProgressTracker object for the
+                downloads."""
+
+                pub_prefix = getattr(pub, "prefix", None)
+                errors = []
+                for f in filelist:
+                        try:
+                                self._arc.extract_package_files([f], dest,
+                                    pub=pub_prefix)
+                                if progtrack:
+                                        fs = os.stat(os.path.join(dest, f))
+                                        progtrack.download_add_progress(1,
+                                            fs.st_size)
+                        except pkg.p5p.UnknownArchiveFiles, e:
+                                ex = tx.TransportProtoError("file",
+                                    errno.ENOENT, reason=str(e),
+                                    repourl=self._url, request=f)
+                                self.__record_proto_error(ex)
+                                errors.append(ex)
+                                continue
+                        except Exception, e:
+                                ex = tx.TransportProtoError("file",
+                                    errno.EPROTO, reason=str(e),
+                                    repourl=self._url, request=f)
+                                self.__record_proto_error(ex)
+                                errors.append(ex)
+                                continue
+                return errors
+
+        def get_url(self):
+                """Returns the repo's url."""
+
+                return self._url
+
+        def get_versions(self, header=None, ccancel=None):
+                """Query the repo for versions information.
+                Returns a file-like object."""
+
+                buf = cStringIO.StringIO()
+                vops = {
+                    "catalog": ["1"],
+                    "file": ["0"],
+                    "manifest": ["0"],
+                    "publisher": ["0", "1"],
+                    "versions": ["0"],
+                }
+
+                buf.write("pkg-server %s\n" % pkg.VERSION)
+                buf.write("\n".join(
+                    "%s %s" % (op, " ".join(vers))
+                    for op, vers in vops.iteritems()
+                ) + "\n")
+                buf.seek(0)
+                self.__stats.record_tx()
+                return buf
+
+        def has_version_data(self):
+                """Returns true if this repo knows its version information."""
+
+                return self._verdata is not None
+
+        def supports_version(self, op, verlist):
+                """Returns version-id of highest supported version.
+                If the version is not supported, or no data is available,
+                -1 is returned instead."""
+
+                if not self.has_version_data() or op not in self._verdata:
+                        return -1
+
+                # This code assumes that both the verlist and verdata
+                # are sorted in reverse order.  This behavior is currently
+                # implemented in the transport code.
+
+                for v in verlist:
+                        if v in self._verdata[op]:
+                                return v
+                return -1
+
+        def touch_manifest(self, mfst, header=None, ccancel=None, pub=None):
+                """No-op."""
+                return True
+
+
+class FileRepo(object):
+        """Factory class for creating transport repository objects for
+        filesystem-based repository sources.
+        """
+
+        def __new__(cls, repostats, repouri, engine, frepo=None):
+                """Returns a new transport repository object based on the
+                provided information.
+
+                'repostats' is a RepoStats object.
+
+                'repouri' is a RepositoryURI object.
+
+                'engine' is a transport engine object.
+
+                'frepo' is an optional Repository object to use instead
+                of creating one.
+
+                The convenience function new_repo() can be used to create
+                the correct repo."""
+
+                try:
+                        scheme, netloc, path, params, query, fragment = \
+                            urlparse.urlparse(repouri.uri, "file",
+                            allow_fragments=0)
+                        path = urllib.url2pathname(path)
+                except Exception, e:
+                        ex = tx.TransportProtoError("file", errno.EPROTO,
+                            reason=str(e), repourl=repostats.url)
+                        repostats.record_tx()
+                        repostats.record_error(decayable=ex.decayable)
+                        raise ex
+
+                # Path must be rstripped of separators for this check to
+                # succeed.
+                if not frepo and os.path.isfile(path.rstrip(os.path.sep)):
+                        # Assume target is a repository archive.
+                        return _ArchiveRepo(repostats, repouri, engine)
+
+                # Assume target is a filesystem repository.
+                return _FilesystemRepo(repostats, repouri, engine, frepo=frepo)
+
 
 # ProgressCallback objects that bridge the interfaces between ProgressTracker,
 # and the necessary callbacks for the TransportEngine.
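
FileRepo is now a factory rather than an implementation: its __new__
examines the file: URI's path and returns an _ArchiveRepo when the path
names a regular file (assumed to be a p5p archive), or a _FilesystemRepo
otherwise.  The dispatch idiom, reduced to a sketch:

        import os

        class ArchiveRepo(object):
                pass

        class FilesystemRepo(object):
                pass

        class Repo(object):
                """Factory using __new__ to choose an implementation
                class (sketch of the FileRepo dispatch above)."""

                def __new__(cls, path):
                        if os.path.isfile(path.rstrip(os.path.sep)):
                                # Regular file: assume an archive.
                                return ArchiveRepo()
                        # Otherwise a filesystem repository.
                        return FilesystemRepo()

        # A directory path yields the filesystem flavor.
        assert isinstance(Repo(os.getcwd()), FilesystemRepo)
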
--- a/src/modules/client/transport/transport.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/client/transport/transport.py	Wed Feb 09 18:43:21 2011 -0800
@@ -21,10 +21,11 @@
 #
 
 #
-# Copyright (c) 2009, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2009, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 import cStringIO
+import copy
 import errno
 import httplib
 import os
@@ -65,6 +66,8 @@
 
         def __init__(self):
                 self.__caches = {}
+                self.pkg_pub_map = None
+                self.alt_pubs = None
 
         def add_cache(self, path, pub=None, readonly=True):
                 """Adds the directory specified by 'path' as a location to read
@@ -502,8 +505,9 @@
                 # of origins for a publisher without incurring the significant
                 # overhead of performing file-based search unless the network-
                 # based resource is unavailable.
-                for d in self.__gen_repo(pub, retry_count, origin_only=True,
-                    prefer_remote=True, alt_repo=alt_repo):
+                for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
+                    prefer_remote=True, alt_repo=alt_repo, operation="search",
+                    versions=[0, 1]):
 
                         try:
                                 fobj = d.do_search(data, header,
@@ -900,18 +904,45 @@
                 raise failures
 
         @LockedTransport()
-        def get_content(self, pub, fhash, ccancel=None):
-                """Given a fmri and fhash, return the uncompressed content
-                from the remote object.  This is similar to get_datstream,
-                except that the transport handles retrieving and decompressing
-                the content."""
+        def get_content(self, pub, fhash, fmri=None, ccancel=None):
+                """Given a fhash, return the uncompressed content content from
+                the remote object.  This is similar to get_datastream, except
+                that the transport handles retrieving and decompressing the
+                content.
+
+                'fmri' is an optional FMRI.  If the fhash corresponds to a
+                known package, it should be specified for optimal transport
+                performance.
+                """
 
                 retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
                 failures = tx.TransportFailures()
                 header = self.__build_header(uuid=self.__get_uuid(pub))
 
+                alt_repo = None
+                if not fmri and self.cfg.alt_pubs:
+                        # No FMRI was provided, but alternate package sources
+                        # are available, so create a new repository object
+                        # that composites the repository information returned
+                        # from the image with the alternate sources for this
+                        # publisher.
+                        alt_repo = pub.selected_repository
+                        if alt_repo:
+                                alt_repo = copy.copy(alt_repo)
+                        else:
+                                alt_repo = publisher.Repository()
+
+                        for tpub in self.cfg.alt_pubs:
+                                if tpub.prefix != pub.prefix:
+                                        continue
+                                for o in tpub.selected_repository.origins:
+                                        if not alt_repo.has_origin(o):
+                                                alt_repo.add_origin(o)
+                elif self.cfg.pkg_pub_map:
+                        alt_repo = self.__get_alt_repo(fmri,
+                            self.cfg.pkg_pub_map)
+
                 for d, v in self.__gen_repo(pub, retry_count, operation="file",
-                    versions=[0, 1]):
+                    versions=[0, 1], alt_repo=alt_repo):
 
                         url = d.get_url()
 
@@ -1013,13 +1044,17 @@
                 as intent."""
 
                 failures = tx.TransportFailures()
-                pub_prefix = fmri.get_publisher()
+                pub_prefix = fmri.publisher
                 pub = self.cfg.get_publisher(pub_prefix)
                 mfst = fmri.get_url_path()
                 retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
                 header = self.__build_header(intent=intent,
                     uuid=self.__get_uuid(pub))
 
+                pmap = self.cfg.pkg_pub_map
+                if not alt_repo and pmap:
+                        alt_repo = self.__get_alt_repo(fmri, pmap)
+
                 for d in self.__gen_repo(pub, retry_count, origin_only=True,
                     alt_repo=alt_repo):
 
@@ -1053,7 +1088,7 @@
 
                 retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
                 failures = tx.TransportFailures()
-                pub_prefix = fmri.get_publisher()
+                pub_prefix = fmri.publisher
                 download_dir = self.cfg.incoming_root
                 mcontent = None
                 header = None
@@ -1077,6 +1112,10 @@
                 # the directories.
                 self._makedirs(download_dir)
 
+                pmap = self.cfg.pkg_pub_map
+                if not alt_repo and pmap:
+                        alt_repo = self.__get_alt_repo(fmri, pmap)
+
                 for d in self.__gen_repo(pub, retry_count, origin_only=True,
                     alt_repo=alt_repo):
 
@@ -1088,7 +1127,7 @@
                                 mcontent = resp.read()
 
                                 verified = self._verify_manifest(fmri,
-                                    content=mcontent)
+                                    content=mcontent, pub=pub)
 
                                 if content_only:
                                         return mcontent
@@ -1124,6 +1163,15 @@
 
                 raise failures
 
+        def __get_alt_repo(self, pfmri, pmap):
+                # Determine whether package data should be retrieved from an
+                # alternate location using the package/publisher map; returns
+                # None if no alternate repository applies.
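+                # 'pmap' is assumed to map publisher prefix -> package stem
+                # -> version string -> publisher object, e.g.:
+                #     pmap["example.com"]["web/curl"]["7.21.2,5.11"]
+                # yields a Publisher whose selected_repository is used.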
+                pfx, stem, ver = pfmri.tuple()
+                sver = str(ver)
+                if pfx in pmap and stem in pmap[pfx] and \
+                    sver in pmap[pfx][stem]:
+                        return pmap[pfx][stem][sver].selected_repository
+
         @LockedTransport()
         def prefetch_manifests(self, fetchlist, excludes=misc.EmptyI,
             progtrack=None, ccancel=None, alt_repo=None):
@@ -1178,24 +1226,45 @@
                 # instance for each publisher's worth of requests that
                 # this routine must process.
                 mx_pub = {}
+
+                pmap = None
+                if not alt_repo:
+                        pmap = self.cfg.pkg_pub_map
+
                 for fmri, intent in fetchlist:
-                        pub_prefix = fmri.get_publisher()
-                        pub = self.cfg.get_publisher(pub_prefix)
+                        if pmap:
+                                alt_repo = self.__get_alt_repo(fmri, pmap)
+
+                        # A separate MultiXfr object must be created for each
+                        # unique publisher or repository.
+                        if alt_repo:
+                                eid = id(alt_repo)
+                        else:
+                                eid = fmri.publisher
+
+                        pub = self.cfg.get_publisher(fmri.publisher)
                         header = self.__build_header(intent=intent,
                             uuid=self.__get_uuid(pub))
-                        if pub_prefix not in mx_pub:
-                                mx_pub[pub_prefix] = MultiXfr(pub,
+
+                        if eid not in mx_pub:
+                                mx_pub[eid] = MultiXfr(pub,
                                     progtrack=progtrack,
-                                    ccancel=ccancel)
+                                    ccancel=ccancel,
+                                    alt_repo=alt_repo)
+
                         # Add requests keyed by requested package
                         # fmri.  Value contains (header, fmri) tuple.
-                        mx_pub[pub_prefix].add_hash(
-                            fmri, (header, fmri))
+                        mx_pub[eid].add_hash(fmri, (header, fmri))
+
+                        # Must reset every cycle if pmap is set.
+                        if pmap:
+                                alt_repo = None
 
                 for mxfr in mx_pub.values():
                         namelist = [k for k in mxfr]
                         while namelist:
                                 chunksz = self.__chunk_size(pub,
+                                    alt_repo=mxfr.get_alt_repo(),
                                     origin_only=True)
                                 mfstlist = [
                                     (n, mxfr[n][0])
@@ -1205,13 +1274,11 @@
 
                                 try:
                                         self._prefetch_manifests_list(mxfr,
-                                            mfstlist, excludes,
-                                            alt_repo=alt_repo)
+                                            mfstlist, excludes)
                                 except apx.PermissionsException:
                                         return
 
-        def _prefetch_manifests_list(self, mxfr, mlist, excludes=misc.EmptyI,
-            alt_repo=None):
+        def _prefetch_manifests_list(self, mxfr, mlist, excludes=misc.EmptyI):
                 """Perform bulk manifest prefetch.  This is the routine
                 that downloads initiates the downloads in chunks
                 determined by its caller _prefetch_manifests.  The mxfr
@@ -1228,7 +1295,7 @@
                 download_dir = self.cfg.incoming_root
 
                 for d in self.__gen_repo(pub, retry_count, origin_only=True,
-                    alt_repo=alt_repo):
+                    alt_repo=mxfr.get_alt_repo()):
 
                         failedreqs = []
                         repostats = self.stats[d.get_url()]
@@ -1319,7 +1386,8 @@
                                         continue
 
                                 os.remove(dl_path)
-                                progtrack.evaluate_progress(fmri)
+                                if progtrack:
+                                        progtrack.evaluate_progress(fmri)
                                 mxfr.del_hash(s)
 
                         # If there were failures, re-generate list for just
@@ -1335,7 +1403,7 @@
                         else:
                                 return
 
-        def _verify_manifest(self, fmri, mfstpath=None, content=None):
+        def _verify_manifest(self, fmri, mfstpath=None, content=None, pub=None):
                 """Verify a manifest.  The caller must supply the FMRI
                 for the package in 'fmri', as well as the path to the
                 manifest file that will be verified.  If signature information
@@ -1349,11 +1417,12 @@
                 the manifest content in 'content'.  One of these arguments
                 must be used."""
 
-                # Get publisher information from FMRI.
-                try:
-                        pub = self.cfg.get_publisher(fmri.get_publisher())
-                except apx.UnknownPublisher:
-                        return False
+                if not isinstance(pub, publisher.Publisher):
+                        # Get publisher using information from FMRI.
+                        try:
+                                pub = self.cfg.get_publisher(fmri.publisher)
+                        except apx.UnknownPublisher:
+                                return False
 
                 # Handle case where publisher has no Catalog.
                 if not pub.catalog:
@@ -1602,7 +1671,8 @@
                 while mfile:
 
                         filelist = []
-                        chunksz = self.__chunk_size(pub)
+                        chunksz = self.__chunk_size(pub,
+                            alt_repo=mfile.get_alt_repo())
 
                         for i, v in enumerate(mfile):
                                 if i >= chunksz:
@@ -1779,6 +1849,7 @@
                         repo = alt_repo
                 elif isinstance(pub, publisher.Publisher):
                         repo = pub.selected_repository
+                        assert repo
 
                 if repo and origin_only:
                         repolist = repo.origins
@@ -1856,13 +1927,20 @@
                         if not repo_found and fail:
                                 raise fail
                         if not repo_found and operation and versions:
+                                if not origins and \
+                                    isinstance(pub, publisher.Publisher):
+                                        # Special error case; no transport
+                                        # configuration available for this
+                                        # publisher.
+                                        raise apx.NoPublisherRepositories(pub)
+
                                 # If a versioned operation was requested and
                                 # wasn't found, then raise an unsupported
                                 # exception using the newest version allowed.
                                 raise apx.UnsupportedRepositoryOperation(pub,
                                     "%s/%d" % (operation, versions[-1]))
 
-        def __chunk_size(self, pub, origin_only=False):
+        def __chunk_size(self, pub, alt_repo=None, origin_only=False):
                 """Determine the chunk size based upon how many of the known
                 mirrors have been visited.  If not all mirrors have been
                 visited, choose a small size so that if it ends up being
@@ -1875,14 +1953,18 @@
                 if not self.__engine:
                         self.__setup()
 
-                if isinstance(pub, publisher.Publisher):
+                if alt_repo:
+                        repolist = alt_repo.origins[:]
+                        if not origin_only:
+                                repolist.extend(alt_repo.mirrors)
+                elif isinstance(pub, publisher.Publisher):
                         repo = pub.selected_repository
                         repolist = repo.origins[:]
                         if not origin_only:
                                 repolist.extend(repo.mirrors)
                 else:
                         # If caller passed RepositoryURI object in as
-                        # pub argument, repolist is the RepoURI
+                        # pub argument, repolist is the RepoURI.
                         repolist = [pub]
 
                 n = len(repolist)
@@ -2024,8 +2106,13 @@
                 if not self.__engine:
                         self.__setup()
 
-                publisher = self.cfg.get_publisher(fmri.get_publisher())
-                mfile = MultiFile(publisher, self, progtrack, ccancel)
+                pmap = self.cfg.pkg_pub_map
+                if not alt_repo and pmap:
+                        alt_repo = self.__get_alt_repo(fmri, pmap)
+
+                publisher = self.cfg.get_publisher(fmri.publisher)
+                mfile = MultiFile(publisher, self, progtrack, ccancel,
+                    alt_repo=alt_repo)
 
                 return mfile
 
@@ -2043,8 +2130,7 @@
                         self.__setup()
 
                 mfile = MultiFileNI(publisher, self, final_dir,
-                    decompress=decompress, progtrack=progtrack, ccancel=ccancel,
-                    alt_repo=alt_repo)
+                    decompress=decompress, progtrack=progtrack, ccancel=ccancel)
 
                 return mfile
 
@@ -2701,7 +2787,7 @@
 
         def add_action(self, action):
                 """The multiple file retrieval operation is asynchronous.
-                Add files to retrieve with this function.   The caller
+                Add files to retrieve with this function.  The caller
                 should pass the action, which causes its file to
                 be added to an internal retrieval list."""
 
--- a/src/modules/lint/engine.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/lint/engine.py	Wed Feb 09 18:43:21 2011 -0800
@@ -39,7 +39,7 @@
 import sys
 
 PKG_CLIENT_NAME = "pkglint"
-CLIENT_API_VERSION = 52
+CLIENT_API_VERSION = 53
 pkg.client.global_settings.client_name = PKG_CLIENT_NAME
 
 class LintEngineException(Exception):
--- a/src/modules/misc.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/misc.py	Wed Feb 09 18:43:21 2011 -0800
@@ -747,14 +747,21 @@
 
         return default_root
 
-def parse_uri(uri):
+def parse_uri(uri, cwd=None):
         """Parse the repository location provided and attempt to transform it
         into a valid repository URI.
+
+        'cwd' is the working directory used to turn relative paths into
+        absolute paths.  If not provided, the current working directory is
+        used.
         """
 
         if uri.find("://") == -1 and not uri.startswith("file:/"):
                 # Convert the file path to a URI.
-                uri = os.path.abspath(uri)
+                if not cwd:
+                        uri = os.path.abspath(uri)
+                elif not os.path.isabs(uri):
+                        uri = os.path.join(cwd, uri)
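+                # Illustrative results: parse_uri("repo", cwd="/tmp") yields
+                # "file:///tmp/repo"; parse_uri("/a/repo") yields
+                # "file:///a/repo".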
+
                 uri = urlparse.urlunparse(("file", "",
                     urllib.pathname2url(uri), "", "", ""))
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/p5p.py	Wed Feb 09 18:43:21 2011 -0800
@@ -0,0 +1,1308 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
+#
+
+import atexit
+import collections
+import errno
+import tarfile as tf
+import pkg.pkggzip
+import pkg.pkgtarfile as ptf
+import os
+import pkg
+import pkg.catalog
+import pkg.client.api_errors as apx
+import pkg.client.publisher
+import pkg.fmri
+import pkg.manifest
+import pkg.misc
+import pkg.portable
+import pkg.p5i
+import shutil
+import tempfile
+import urllib
+
+
+class ArchiveErrors(apx.ApiException):
+        """Base exception class for archive class errors."""
+
+
+class InvalidArchiveIndex(ArchiveErrors):
+        """Used to indicate that the specified index is in a format not
+        supported or recognized by this version of the pkg(5) ArchiveIndex
+        class."""
+
+        def __init__(self, arc_name):
+                ArchiveErrors.__init__(self)
+                self.__name = arc_name
+
+        def __str__(self):
+                return _("%s is not in a supported or recognizable archive "
+                    "index format.") % self.__name
+
+
+class ArchiveIndex(object):
+        """Class representing a pkg(5) archive table of contents and a set of
+        interfaces to populate and retrieve entries.
+
+        Entries in this file are written in the following format:
+
+            <name>NUL<offset>NUL<entry_size>NUL<size>NUL<typeflag>NULNL
+
+            <name> is a string containing the pathname of the file in the
+            archive.  It can be up to 65,535 bytes in length.
+
+            <offset> is an unsigned long long integer containing the relative
+            offset in bytes of the first header block for the file in the
+            archive.  The offset is relative to the end of the last block of
+            the first file in the archive.
+
+            <entry_size> is an unsigned long long integer containing the size of
+            the file's entry in bytes in the archive (including archive
+            headers and trailers for the entry).
+
+            <size> is an unsigned long long integer containing the size of the
+            file in bytes in the archive.
+
+            <typeflag> is a single character representing the type of the file
+            in the archive.  Possible values are:
+                0 Regular File
+                1 Hard Link
+                2 Symbolic Link
+                5 Directory or subdirectory"""
+
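+        # An index entry for a regular file might look like this
+        # (illustrative values; NUL rendered as \0, with spaces added here
+        # only for readability):
+        #
+        #   publisher/example.com/file/ab/ab4d21... \0 0 \0 2560 \0 1999 \0 0 \0 \n
+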
+        version = None
+        CURRENT_VERSION = 0
+        COMPATIBLE_VERSIONS = 0,
+        ENTRY_FORMAT = "%s\0%d\0%d\0%d\0%c\0\n"
+
+        def __init__(self, name, mode="r", version=None):
+                """Open a pkg(5) archive table of contents file.
+
+                'name' should be the absolute path of the file to use when
+                reading or writing index data.
+
+                'mode' indicates whether the index is being used for reading
+                or writing, and can be 'r' or 'w'.  Appending to or updating
+                a table of contents file is not supported.
+
+                'version' is an optional integer value specifying the version
+                of the index to be read or written.  If not specified, the
+                current version is assumed.
+                """
+
+                assert os.path.isabs(name)
+                if version is None:
+                        version = self.CURRENT_VERSION
+                if version not in self.COMPATIBLE_VERSIONS:
+                        raise InvalidArchiveIndex(name)
+
+                self.__closed = False
+                self.__name = name
+                self.__mode = mode + "b"
+                try:
+                        self.__file = pkg.pkggzip.PkgGzipFile(self.__name,
+                            self.__mode)
+                except IOError, e:
+                        if e.errno:
+                                raise
+                        # Underlying gzip library raises this exception if the
+                        # file isn't a valid gzip file.  So, assume that if
+                        # errno isn't set, this is a gzip error instead.
+                        raise InvalidArchiveIndex(name)
+
+                self.version = version
+
+        def __enter__(self):
+                """Context handler that enables 'with' usage."""
+                return self
+
+        def __exit__(self, exc_type, exc_value, exc_tb):
+                """Context handler that ensures the index is automatically
+                closed when the 'with' block exits.
+                """
+                # ArchiveIndex defers no writes beyond the gzip stream, so
+                # the same cleanup applies in error and non-error cases.
+                self.close()
+
+        @property
+        def pathname(self):
+                """The absolute path of the archive index file."""
+                return self.__name
+
+        def add(self, name, offset, entry_size, size, typeflag):
+                """Add an entry for the given archive file to the table of
+                contents."""
+
+                self.__file.write(self.ENTRY_FORMAT % (name, offset, entry_size,
+                    size, typeflag))
+
+        def offsets(self):
+                """Returns a generator that yields tuples of the form (name,
+                offset) for each file in the index."""
+
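+                # Each logical entry ends with "\0\n"; a raw line whose
+                # character before the newline is not NUL means the entry's
+                # filename itself contained a newline, so such lines are
+                # stitched back together below before splitting.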
+                self.__file.seek(0)
+                l = None
+                try:
+                        for line in self.__file:
+                                if line[-2] != "\0":
+                                        # Filename contained newline.
+                                        if l is None:
+                                                l = line
+                                        else:
+                                                l += "\n"
+                                                l += line
+                                        continue
+                                elif l is None:
+                                        l = line
+
+                                name, offset, ignored = l.split("\0", 2)
+                                yield name, long(offset)
+                                l = None
+                except ValueError:
+                        raise InvalidArchiveIndex(self.__name)
+                except IOError, e:
+                        if e.errno:
+                                raise
+                        # Underlying gzip library raises this exception if the
+                        # file isn't a valid gzip file.  So, assume that if
+                        # errno isn't set, this is a gzip error instead.
+                        raise InvalidArchiveIndex(self.__name)
+
+        def close(self):
+                """Close the index.  No further operations can be performed
+                using this object once closed."""
+
+                if self.__closed:
+                        return
+                if self.__file:
+                        self.__file.close()
+                        self.__file = None
+                self.__closed = True
+
+
+class InvalidArchive(ArchiveErrors):
+        """Used to indicate that the specified archive is in a format not
+        supported or recognized by this version of the pkg(5) Archive class.
+        """
+
+        def __init__(self, arc_name):
+                ArchiveErrors.__init__(self)
+                self.arc_name = arc_name
+
+        def __str__(self):
+                return _("Archive %s is missing, unsupported, or corrupt.") % \
+                    self.arc_name
+
+
+class CorruptArchiveFiles(ArchiveErrors):
+        """Used to indicate that the specified file(s) could not be found in the
+        archive.
+        """
+
+        def __init__(self, arc_name, files):
+                ArchiveErrors.__init__(self)
+                self.arc_name = arc_name
+                self.files = files
+
+        def __str__(self):
+                return _("Package archive %(arc_name)s contains corrupt "
+                    "entries for the requested package file(s):\n%(files)s.") % {
+                    "arc_name": self.arc_name,
+                    "files": "\n".join(self.files) }
+
+
+class UnknownArchiveFiles(ArchiveErrors):
+        """Used to indicate that the specified file(s) could not be found in the
+        archive.
+        """
+
+        def __init__(self, arc_name, files):
+                ArchiveErrors.__init__(self)
+                self.arc_name = arc_name
+                self.files = files
+
+        def __str__(self):
+                return _("Package archive %(arc_name)s does not contain the "
+                    "requested package file(s):\n%(files)s.") % {
+                    "arc_name": self.arc_name,
+                    "files": "\n".join(self.files) }
+
+
+class UnknownPackageManifest(ArchiveErrors):
+        """Used to indicate that a manifest for the specified package could not
+        be found in the archive.
+        """
+
+        def __init__(self, arc_name, pfmri):
+                ArchiveErrors.__init__(self)
+                self.arc_name = arc_name
+                self.pfmri = pfmri
+
+        def __str__(self):
+                return _("No package manifest for package '%(pfmri)s' exists "
+                    "in archive %(arc_name)s.") % self.__dict__
+
+
+class Archive(object):
+        """Class representing a pkg(5) archive and a set of interfaces to
+        populate it and retrieve data from it.
+
+        This class stores package data in pax archives in version 4 repository
+        format.  Encoding the structure of a repository into the archive is
+        necessary to enable easy composition of package archive contents with
+        existing repositories and to enable consumers to access the contents
+        of a package archive just as they would a repository.
+
+        This class can be used to access or extract the contents of almost any
+        tar archive, except for those that are compressed.
+        """
+
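+        # Expected archive layout (illustrative):
+        #
+        #   pkg5.index.0.gz                           (archive index)
+        #   publisher/<prefix>/pkg/<stem>/<version>   (package manifests)
+        #   publisher/<prefix>/file/<hh>/<hash>       (package files)
+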
+        __idx_pfx = "pkg5.index."
+        __idx_sfx = ".gz"
+        __idx_name = "pkg5.index.%s.gz"
+        __idx_ver = ArchiveIndex.CURRENT_VERSION
+        __index = None
+        __arc_tfile = None
+        __arc_file = None
+        version = None
+
+        # If the repository format changes, then the version of the package
+        # archive format should be rev'd and this updated.  (Although that isn't
+        # strictly necessary, as the Repository class should remain backwards
+        # compatible with this format.)
+        CURRENT_VERSION = 0
+        COMPATIBLE_VERSIONS = (0,)
+
+        def __init__(self, pathname, mode="r"):
+                """'pathname' is the absolute path of the archive file to create
+                or read from.
+
+                'mode' is a string used to indicate whether the archive is being
+                opened for reading or writing, which is indicated by 'r' and 'w'
+                respectively.  An archive opened for writing may not be used for
+                any extraction operations, and must not already exist.
+                """
+
+                assert os.path.isabs(pathname)
+                self.__arc_name = pathname
+                self.__closed = False
+                self.__mode = mode
+                self.__temp_dir = tempfile.mkdtemp()
+
+                # Used to cache publisher objects.
+                self.__pubs = None
+
+                # Used to cache location of publisher catalog data.
+                self.__catalogs = {}
+
+                arc_mode = mode + "b"
+                mode += ":"
+
+                assert "r" in mode or "w" in mode
+                assert "a" not in mode
+                if "w" in mode:
+                        # Don't allow overwrite of existing archive.
+                        assert not os.path.exists(self.__arc_name)
+
+                try:
+                        self.__arc_file = open(self.__arc_name, arc_mode,
+                            128*1024)
+                except EnvironmentError, e:
+                        if e.errno in (errno.ENOENT, errno.EISDIR):
+                                raise InvalidArchive(self.__arc_name)
+                        raise apx._convert_error(e)
+
+                self.__queue_offset = 0
+                self.__queue = collections.deque()
+
+                # Ensure cleanup is performed on exit if the archive is not
+                # explicitly closed.
+                def arc_cleanup():
+                        if not self.__closed:
+                                self.__close_fh()
+                        self.__cleanup()
+                        return
+                atexit.register(arc_cleanup)
+
+                # Open the pax archive for the package.
+                try:
+                        self.__arc_tfile = ptf.PkgTarFile.open(mode=mode,
+                            fileobj=self.__arc_file, format=tf.PAX_FORMAT)
+                except EnvironmentError, e:
+                        raise apx._convert_error(e)
+                except Exception:
+                        # Likely not an archive or the archive is corrupt.
+                        raise InvalidArchive(self.__arc_name)
+
+                self.__extract_offsets = {}
+                if "r" in mode:
+                        # Opening the tarfile loaded the first member, which
+                        # should be the archive index file.
+                        member = self.__arc_tfile.firstmember
+                        if not member:
+                                # Archive is empty.
+                                raise InvalidArchive(self.__arc_name)
+
+                        if not member.name.startswith(self.__idx_pfx) or \
+                            not member.name.endswith(self.__idx_sfx):
+                                return
+                        else:
+                                self.__idx_name = member.name
+
+                        comment = member.pax_headers.get("comment", "")
+                        if not comment.startswith("pkg5.archive.version."):
+                                return
+
+                        try:
+                                self.version = int(comment.rsplit(".", 1)[-1])
+                        except (IndexError, ValueError):
+                                raise InvalidArchive(self.__arc_name)
+
+                        if self.version not in self.COMPATIBLE_VERSIONS:
+                                raise InvalidArchive(self.__arc_name)
+
+                        # Create a temporary file to extract the index to,
+                        # and then extract it from the archive.
+                        fobj, idxfn = self.__mkstemp()
+                        fobj.close()
+                        try:
+                                self.__arc_tfile.extract_to(member,
+                                    path=self.__temp_dir,
+                                    filename=os.path.basename(idxfn))
+                        except tf.TarError:
+                                # Read error encountered.
+                                raise InvalidArchive(self.__arc_name)
+                        except EnvironmentError, e:
+                                raise apx._convert_error(e)
+
+                        # After extraction, the current archive file offset
+                        # is the base that will be used for all other
+                        # extractions.
+                        index_offset = self.__arc_tfile.offset
+
+                        # Load archive index.
+                        try:
+                                self.__index = ArchiveIndex(idxfn, mode="r",
+                                    version=self.__idx_ver)
+                                for name, offset in self.__index.offsets():
+                                        self.__extract_offsets[name] = \
+                                            index_offset + offset
+                        except InvalidArchiveIndex:
+                                # Index is corrupt; rather than driving on
+                                # and failing later, bail now.
+                                os.unlink(idxfn)
+                                raise InvalidArchive(self.__arc_name)
+                        except EnvironmentError, e:
+                                raise apx._convert_error(e)
+
+                elif "w" in mode:
+                        self.__pubs = {}
+
+                        # Force normalization of archive member mode and
+                        # ownership information during archive creation.
+                        def gettarinfo(*args, **kwargs):
+                                ti = ptf.PkgTarFile.gettarinfo(self.__arc_tfile,
+                                    *args, **kwargs)
+                                if ti.isreg():
+                                        ti.mode = pkg.misc.PKG_FILE_MODE
+                                elif ti.isdir():
+                                        ti.mode = pkg.misc.PKG_DIR_MODE
+                                if ti.name == "pkg5.index.0.gz":
+                                        ti.pax_headers["comment"] = \
+                                            "pkg5.archive.version.%d" % \
+                                            self.CURRENT_VERSION
+                                ti.uid = 0
+                                ti.gid = 0
+                                ti.uname = "root"
+                                ti.gname = "root"
+                                return ti
+                        self.__arc_tfile.gettarinfo = gettarinfo
+
+                        self.__idx_name = self.__idx_name % self.__idx_ver
+
+                        # Create a temporary file to write the index to,
+                        # and then create the index.
+                        fobj, idxfn = self.__mkstemp()
+                        fobj.close()
+                        self.__index = ArchiveIndex(idxfn, mode=arc_mode)
+
+                        # Used to determine what the default publisher will be
+                        # for the archive file at close().
+                        self.__default_pub = ""
+
+                        # Used to keep track of which package files have already
+                        # been added to archive.
+                        self.__processed_pfiles = set()
+
+                        # Always create archives using current version.
+                        self.version = self.CURRENT_VERSION
+
+                        # Always add base publisher directory to start; tarfile
+                        # requires an actual filesystem object to do this, so
+                        # re-use an existing directory to do so.
+                        self.add("/", arcname="publisher")
+
+        def __enter__(self):
+                """Context handler that enables 'with' usage."""
+                return self
+
+        def __exit__(self, exc_type, exc_value, exc_tb):
+                """Context handler that ensures the archive is automatically
+                closed when the 'with' block exits.  On error, file objects
+                are closed without writing anything out.
+                """
+
+                if exc_type or exc_value or exc_tb:
+                        # Only close file objects; don't actually write anything
+                        # out in an error condition.
+                        self.__close_fh()
+                        return
+
+                # Close and/or write out archive as needed.
+                self.close()
+
+        def __find_extract_offsets(self):
+                """Private helper method to find offsets for individual archive
+                member extraction.
+                """
+
+                if self.__extract_offsets:
+                        return
+
+                # This causes the entire archive to be read, but is the only way
+                # to find the offsets to extract everything.
+                try:
+                        for member in self.__arc_tfile.getmembers():
+                                self.__extract_offsets[member.name] = \
+                                    member.offset
+                except tf.TarError:
+                        # Read error encountered.
+                        raise InvalidArchive(self.__arc_name)
+                except EnvironmentError, e:
+                        raise apx._convert_error(e)
+
+        def __mkdtemp(self):
+                """Creates a temporary directory for use during archive
+                operations, and return its absolute path.  The temporary
+                directory will be removed after the archive is closed.
+                """
+
+                try:
+                        return tempfile.mkdtemp(dir=self.__temp_dir)
+                except EnvironmentError, e:
+                        raise apx._convert_error(e)
+
+        def __mkstemp(self):
+                """Creates a temporary file for use during archive operations,
+                and returns a file object for it and its absolute path.  The
+                temporary file will be removed after the archive is closed.
+                """
+                try:
+                        fd, fn = tempfile.mkstemp(dir=self.__temp_dir)
+                        fobj = os.fdopen(fd, "wb")
+                except EnvironmentError, e:
+                        raise apx._convert_error(e)
+                return fobj, fn
+
+        def add(self, pathname, arcname=None):
+                """Queue the specified object for addition to the archive.
+                The archive will be created and the object added to it when the
+                close() method is called.  The target object must not change
+                after this method is called while the archive is open.  The
+                item being added must not already exist in the archive.
+
+                'pathname' is an optional string specifying the absolute path
+                of a file to add to the archive.  The file may be a regular
+                file, directory, symbolic link, or hard link. 
+
+                'arcname' is an optional string specifying an alternative name
+                for the file in the archive.  If not given, the full pathname
+                provided will be used.
+                """
+
+                assert not self.__closed and "w" in self.__mode
+                tfile = self.__arc_tfile
+                ti = tfile.gettarinfo(pathname, arcname=arcname)
+                buf = ti.tobuf(tfile.format, tfile.encoding, tfile.errors)
+
+                # Pre-calculate size of archive entry by determining where
+                # in the archive the entry would be added.
+                entry_sz = len(buf)
+                blocks, rem = divmod(ti.size, tf.BLOCKSIZE)
+                if rem > 0:
+                        blocks += 1
+                entry_sz += blocks * tf.BLOCKSIZE
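+                # E.g., assuming a single 512-byte header block, a 1000-byte
+                # regular file consumes 512 + 2 * 512 = 1536 bytes in the
+                # archive (the payload is rounded up to whole blocks).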
+
+                # Record name, offset, entry_size, size, and type for each file.
+                self.__index.add(ti.name, self.__queue_offset, entry_sz,
+                    ti.size, ti.type)
+                self.__queue_offset += entry_sz
+                self.__queue.append((pathname, ti.name))
+
+                # Discard the tarinfo; keeping these objects in memory would
+                # be faster, but at a significant memory footprint cost.
+                ti.tarfile = None
+                del ti
+
+        def __add_publisher_files(self, root, file_dir, hashes, fpath=None,
+            repo=None):
+                """Private helper function for adding package files."""
+
+                if file_dir not in self.__processed_pfiles:
+                        # Directory entry needs to be added
+                        # for package files.
+                        self.add(root, arcname=file_dir)
+                        self.__processed_pfiles.add(file_dir)
+
+                for fhash in hashes:
+                        hash_dir = os.path.join(file_dir, fhash[:2])
+                        if hash_dir not in self.__processed_pfiles:
+                                # Directory entry needs to be added
+                                # for hash directory.
+                                self.add(root, arcname=hash_dir)
+                                self.__processed_pfiles.add(hash_dir)
+
+                        hash_fname = os.path.join(hash_dir, fhash)
+                        if hash_fname in self.__processed_pfiles:
+                                # Already added for a different
+                                # package.
+                                continue
+
+                        if repo:
+                                src = repo.file(fhash)
+                        else:
+                                src = os.path.join(fpath, fhash)
+                        self.add(src, arcname=hash_fname)
+
+                        # A bit expensive potentially in terms of
+                        # memory usage, but necessary to prevent
+                        # duplicate archive entries.
+                        self.__processed_pfiles.add(hash_fname)
+
+        def __add_package(self, pfmri, mpath, fpath=None, repo=None):
+                """Private helper function that queues a package for addition to
+                the archive.
+
+                'mpath' is the absolute path of the package manifest file.
+
+                'fpath' is an optional directory containing the package files
+                stored by hash.
+
+                'repo' is an optional Repository object to use to retrieve the
+                data for the package to be added to the archive.
+
+                'fpath' or 'repo' must be provided.
+                """
+
+                assert not self.__closed and "w" in self.__mode
+                assert mpath
+                assert not (fpath and repo)
+                assert fpath or repo
+
+                if not self.__default_pub:
+                        self.__default_pub = pfmri.publisher
+
+                m = pkg.manifest.Manifest(pfmri)
+                m.set_content(pathname=mpath)
+
+                # Throughout this function, the archive root directory is used
+                # as a template to add other directories that should be present
+                # in the archive.  This is necessary as the tarfile class does
+                # not support adding arbitrary archive entries without a real
+                # filesystem object as a source.
+                root = os.path.dirname(self.__arc_name)
+                pub_dir = os.path.join("publisher", pfmri.publisher)
+                pkg_dir = os.path.join(pub_dir, "pkg")
+                for d in pub_dir, pkg_dir:
+                        if d not in self.__processed_pfiles:
+                                self.add(root, arcname=d)
+                                self.__processed_pfiles.add(d)
+
+                # After manifest has been loaded, assume it's ok to queue the
+                # manifest itself for addition to the archive.
+                arcname = os.path.join(pkg_dir, pfmri.get_dir_path())
+
+                # Entry may need to be added for manifest directory.
+                man_dir = os.path.dirname(arcname)
+                if man_dir not in self.__processed_pfiles:
+                        self.add(root, arcname=man_dir)
+                        self.__processed_pfiles.add(man_dir)
+
+                # Entry needs to be added for manifest file.
+                self.add(mpath, arcname=arcname)
+
+                # Now add any files to the archive for every action that has a
+                # payload.  (That payload can consist of multiple files.)
+                file_dir = os.path.join(pub_dir, "file")
+                for a in m.gen_actions():
+                        if not a.has_payload or not a.hash:
+                                # Nothing to archive.
+                                continue
+
+                        payloads = set([a.hash])
+
+                        # Signature actions require special handling.
+                        if a.name == "signature":
+                                payloads.update(a.attrs.get("chain",
+                                    "").split())
+
+                                if repo:
+                                        # This bit of logic only possible if
+                                        # package source is a repository.
+                                        pub = self.__pubs.get(pfmri.publisher,
+                                            None)
+                                        if not pub:
+                                                self.__pubs[pfmri.publisher] = \
+                                                    pub = repo.get_publisher(
+                                                    pfmri.publisher)
+                                                assert pub
+
+                                        payloads.update(pub.signing_ca_certs)
+                                        payloads.update(pub.intermediate_certs)
+
+                        if not payloads:
+                                # Nothing more to do.
+                                continue
+
+                        self.__add_publisher_files(root, file_dir, payloads,
+                             fpath=fpath, repo=repo)
+
+        def add_package(self, pfmri, mpath, fpath):
+                """Queues the specified package for addition to the archive.
+                The archive will be created and the package added to it when
+                the close() method is called.  The package contents must not
+                change after this method is called while the archive is open.
+                Please note that, for signed packages, signing certificates
+                used by the publisher are not automatically added to the
+                archive.
+
+                'pfmri' is the FMRI string or object identifying the package to
+                add.
+
+                'mpath' is the absolute path of the package manifest file.
+
+                'fpath' is the directory containing the package files stored
+                by hash.
+                """
+
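+                # Illustrative usage (all values here are assumptions):
+                #     arc = Archive("/tmp/example.p5p", mode="w")
+                #     arc.add_package("pkg://example.com/foo@1.0,5.11-0.1",
+                #         "/tmp/foo.manifest",
+                #         "/repo/publisher/example.com/file")
+                #     arc.close()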
+                assert pfmri and mpath and fpath
+                if isinstance(pfmri, basestring):
+                        pfmri = pkg.fmri.PkgFmri(pfmri)
+                assert pfmri.publisher
+                self.__add_package(pfmri, mpath, fpath=fpath)
+
+        def add_signing_certs(self, pub, hashes, ca):
+                """Queues the specified publisher certs for addition to the
+                archive. The archive will be created and the certs added to it
+                when the close() method is called.  The cert contents must not
+                change after this method is called while the archive is open.
+
+                'pub' is the prefix of the publisher to store the package
+                files for.
+
+                'hashes' is the list of certificate hash files to store.
+                (The certificate files must be in the same compressed format
+                that the Repository class stores them in.)
+
+                'ca' is a boolean indicating whether the certs are added as
+                CA certificates or intermediate certificates.
+                """
+
+                root = os.path.dirname(self.__arc_name)
+                pub_dir = os.path.join("publisher", pub)
+                file_dir = os.path.join(pub_dir, "file")
+
+                pubobj = self.__pubs.get(pub, None) 
+                if not pubobj:
+                        self.__pubs[pub] = pubobj = \
+                            pkg.client.publisher.Publisher(pub)
+
+                for fname in hashes:
+                        hsh = os.path.basename(fname)
+                        self.__add_publisher_files(root, file_dir, [hsh],
+                            fpath=os.path.dirname(fname))
+                        if ca:
+                                pubobj.signing_ca_certs.append(hsh)
+                        else:
+                                pubobj.intermediate_certs.append(hsh)
+
+        def add_repo_package(self, pfmri, repo):
+                """Queues the specified package in a repository for addition to
+                the archive. The archive will be created and the package added
+                to it when the close() method is called.  The package contents
+                must not change after this method is called while the archive is
+                open.
+
+                'pfmri' is the FMRI string or object identifying the package to
+                add.
+
+                'repo' is the Repository object to use to retrieve the data for
+                the package to be added to the archive.
+                """
+
+                assert pfmri and repo
+                if isinstance(pfmri, basestring):
+                        pfmri = pkg.fmri.PkgFmri(pfmri)
+                assert pfmri.publisher
+                self.__add_package(pfmri, repo.manifest(pfmri), repo=repo)
+
+        def extract_catalog1(self, part, path, pub=None):
+                """Extract the named v1 catalog part to the specified directory.
+
+                'part' is the name of the catalog file part.
+
+                'path' is the absolute path of the directory to extract the
+                file to.  It will be created automatically if it does not
+                exist.
+
+                'pub' is an optional publisher prefix.  If not provided, the
+                first publisher catalog found in the archive will be used.
+                """
+
+                # If the extraction index doesn't exist, scan the
+                # complete archive and build one.
+                self.__find_extract_offsets()
+
+                pubs = [
+                    p for p in self.get_publishers()
+                    if not pub or p.prefix == pub
+                ]
+                if not pubs:
+                        raise UnknownArchiveFiles(self.__arc_name, [part])
+
+                if not pub:
+                        # Default to first known publisher.
+                        pub = pubs[0].prefix
+
+                # Expected locations in archive for various metadata.
+                # A trailing slash is appended so that archive entry
+                # comparisons skip the entries for the directory.
+                pubpath = os.path.join("publisher", pub) + os.path.sep
+                catpath = os.path.join(pubpath, "catalog") + os.path.sep
+                partpath = os.path.join(catpath, part)
+
+                if pub in self.__catalogs:
+                        # Catalog file requested for this publisher before.
+                        croot = self.__catalogs[pub]
+                        if croot:
+                                # Catalog data is cached because it was
+                                # generated on demand, so just copy it
+                                # from there to the destination.
+                                src = os.path.join(croot, part)
+                                if not os.path.exists(src):
+                                        raise UnknownArchiveFiles(
+                                            self.__arc_name, [partpath])
+
+                                try:
+                                        pkg.portable.copyfile(
+                                            os.path.join(croot, part),
+                                            os.path.join(path, part))
+                                except EnvironmentError, e:
+                                        raise apx._convert_error(e)
+                        else:
+                                # Use default extraction logic.
+                                self.extract_to(partpath, path, filename=part)
+                        return
+
+                # Determine whether any catalog files are present for this
+                # publisher in the archive.
+                for name in self.__extract_offsets:
+                        if name.startswith(catpath):
+                                # Any catalog file at all means this publisher
+                                # should be marked as being known to have one
+                                # and then the request passed on to extract_to.
+                                self.__catalogs[pub] = None
+                                return self.extract_to(partpath, path,
+                                    filename=part)
+
+                # No catalog data found for publisher; construct a catalog
+                # in memory based on packages found for publisher.
+                cat = pkg.catalog.Catalog(batch_mode=True, sign=False)
+                manpath = os.path.join(pubpath, "pkg") + os.path.sep
+                for name in self.__extract_offsets:
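+                        # Manifest entries have the form
+                        # publisher/<prefix>/pkg/<stem>/<version>, i.e.
+                        # exactly four '/' separators.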
+                        if name.startswith(manpath) and name.count("/") == 4:
+                                ignored, stem, ver = name.rsplit("/", 2)
+                                stem = urllib.unquote(stem)
+                                ver = urllib.unquote(ver)
+                                pfmri = pkg.fmri.PkgFmri("%s@%s" % (stem, ver),
+                                    publisher=pub)
+
+                                fobj = self.get_file(name)
+                                m = pkg.manifest.Manifest(pfmri=pfmri)
+                                m.set_content(content=fobj.read(),
+                                    signatures=True)
+                                cat.add_package(pfmri, manifest=m)
+
+                # Store catalog in a temporary directory and mark publisher
+                # as having catalog data cached.
+                croot = self.__mkdtemp()
+                cat.meta_root = croot
+                cat.batch_mode = False
+                cat.finalize()
+                cat.save()
+                self.__catalogs[pub] = croot
+
+                # Finally, copy requested file to destination.
+                try:
+                        pkg.portable.copyfile(os.path.join(croot, part),
+                            os.path.join(path, part))
+                except EnvironmentError, e:
+                        raise apx._convert_error(e)
+
+        def extract_package_files(self, hashes, path, pub=None):
+                """Extract one or more package files from the archive.
+
+                'hashes' is a list of the files to extract named by their hash.
+
+                'path' is the absolute path of the directory to extract the
+                files to.  It will be created automatically if it does not
+                exist.
+
+                'pub' is the prefix (name) of the publisher that the package
+                files are associated with.  If not provided, the first file
+                named after the given hash found in the archive will be used.
+                (This will be noticeably slower depending on the size of the
+                archive.)
+                """
+
+                assert not self.__closed and "r" in self.__mode
+                assert hashes
+
+                # If the extraction index doesn't exist, scan the complete
+                # archive and build one.
+                self.__find_extract_offsets()
+
+                if not pub:
+                        # Scan extract offsets index for the first instance of
+                        # any package file seen for each hash and extract the
+                        # file as each is found.
+                        hashes = set(hashes)
+
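+                        # A match is any archive entry ending in
+                        # file/<first two hash digits>/<hash>, regardless of
+                        # publisher.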
+                        for name in self.__extract_offsets:
+                                for fhash in hashes:
+                                        hash_fname = os.path.join("file",
+                                            fhash[:2], fhash)
+                                        if name.endswith(hash_fname):
+                                                self.extract_to(name, path,
+                                                    filename=fhash)
+                                                hashes.discard(fhash)
+                                                break
+                                if not hashes:
+                                        break
+
+                        if hashes:
+                                # Any remaining hashes are for package files
+                                # that couldn't be found.
+                                raise UnknownArchiveFiles(self.__arc_name,
+                                    hashes)
+                        return
+
+                for fhash in hashes:
+                        arcname = os.path.join("publisher", pub, "file",
+                            fhash[:2], fhash)
+                        self.extract_to(arcname, path, filename=fhash)
+
+        def extract_package_manifest(self, pfmri, path, filename=""):
+                """Extract a package manifest from the archive.
+
+                'pfmri' is the FMRI string or object identifying the package
+                manifest to extract.
+
+                'path' is the absolute path of the directory to extract the
+                manifest to.  It will be created automatically if it does not
+                exist.
+
+                'filename' is an optional name to use for the extracted file.
+                If not provided, the default behaviour is to create a directory
+                named after the package stem in 'path' and a file named after
+                the version in that directory; both components will be URI
+                encoded.
+                """
+
+                assert not self.__closed and "r" in self.__mode
+                assert pfmri and path
+                if isinstance(pfmri, basestring):
+                        pfmri = pkg.fmri.PkgFmri(pfmri)
+                assert pfmri.publisher
+
+                if not filename:
+                        filename = pfmri.get_dir_path()
+
+                arcname = os.path.join("publisher", pfmri.publisher, "pkg",
+                    pfmri.get_dir_path())
+                try:
+                        self.extract_to(arcname, path, filename=filename)
+                except UnknownArchiveFiles:
+                        raise UnknownPackageManifest(self.__arc_name, pfmri)
+
+        def extract_to(self, src, path, filename=""):
+                """Extract a member from the archive.
+
+                'src' is the pathname of the archive file to extract.
+
+                'path' is the absolute path of the directory to extract the file
+                to.
+
+                'filename' is an optional string indicating the name to use for
+                the extracted file.  If not provided, the full member name in
+                the archive will be used.
+                """
+
+                assert not self.__closed and "r" in self.__mode
+
+                # Get the offset in the archive for the given file, and then
+                # seek to it.
+                offset = self.__extract_offsets.get(src, None)
+                tfile = self.__arc_tfile
+                if offset is not None:
+                        # Prepare the tarfile object for extraction by telling
+                        # it where to look for the file.
+                        self.__arc_file.seek(offset)
+                        tfile.offset = offset
+
+                        # Get the tarinfo object needed to extract the file.
+                        try:
+                                member = tf.TarInfo.fromtarfile(tfile)
+                        except tf.TarError:
+                                # Read error encountered.
+                                raise InvalidArchive(self.__arc_name)
+                        except EnvironmentError, e:
+                                raise apx._convert_error(e)
+
+                        if member.name != src:
+                                # Index must be invalid or tarfile has gone off
+                                # the rails trying to read the archive.
+                                raise InvalidArchive(self.__arc_name)
+
+                elif self.__extract_offsets:
+                        # Assume there is no such archive member if extract
+                        # offsets are known, but the item can't be found.
+                        raise UnknownArchiveFiles(self.__arc_name, [src])
+                else:
+                        # No archive index; fallback to retrieval by name.
+                        member = src
+
+                # Extract the file to the specified location.
+                try:
+                        self.__arc_tfile.extract_to(member, path=path,
+                            filename=filename)
+                except KeyError:
+                        raise UnknownArchiveFiles(self.__arc_name, [src])
+                except tf.TarError:
+                        # Read error encountered.
+                        raise InvalidArchive(self.__arc_name)
+                except EnvironmentError, e:
+                        raise apx._convert_error(e)
+
+                if not isinstance(member, tf.TarInfo):
+                        # Nothing more to do.
+                        return
+
+                # If possible, validate the size of the extracted object.
+                try:
+                        if not filename:
+                                filename = member.name
+                        dest = os.path.join(path, filename)
+                        if os.stat(dest).st_size != member.size:
+                                raise CorruptArchiveFiles(self.__arc_name,
+                                    [src])
+                except EnvironmentError, e:
+                        raise apx._convert_error(e)
+
+        def get_file(self, src):
+                """Returns an archive member as a file object.  If the matching
+                member is a regular file, a file-like object will be returned.
+                If it is a link, a file-like object is constructed from the
+                link's target.  In all other cases, None will be returned.  The
+                file-like object is read-only and provides methods: read(),
+                readline(), readlines(), seek() and tell().  The returned object
+                must be closed before the archive is, and must not be used after
+                the archive is closed.
+
+                'src' is the pathname of the archive file to return.
+                """
+
+                assert not self.__closed and "r" in self.__mode
+
+                # Get the offset in the archive for the given file, and then
+                # seek to it.
+                offset = self.__extract_offsets.get(src, None)
+                tfile = self.__arc_tfile
+                if offset is not None:
+                        # Prepare the tarfile object for extraction by telling
+                        # it where to look for the file.
+                        self.__arc_file.seek(offset)
+                        tfile.offset = offset
+
+                        # Get the tarinfo object needed to extract the file.
+                        member = tf.TarInfo.fromtarfile(tfile)
+                elif self.__extract_offsets:
+                        # Assume there is no such archive member if extract
+                        # offsets are known, but the item can't be found.
+                        raise UnknownArchiveFiles(self.__arc_name, [src])
+                else:
+                        # No archive index; fallback to retrieval by name.
+                        member = src
+
+                # Finally, return the object for the matching archive member.
+                try:
+                        return tfile.extractfile(member)
+                except KeyError:
+                        raise UnknownArchiveFiles(self.__arc_name, [src])
+
+        def get_package_file(self, fhash, pub=None):
+                """Returns the first package file matching the given hash as a
+                file-like object. The file-like object is read-only and provides
+                methods: read(), readline(), readlines(), seek() and tell().
+                The returned object must be closed before the archive is, and
+                must not be used after the archive is closed.
+
+                'fhash' is the hash name of the file to return.
+
+                'pub' is the prefix (name) of the publisher that the package
+                files are associated with.  If not provided, the first file
+                named after the given hash found in the archive will be used.
+                (This will be noticeably slower depending on the size of the
+                archive.)
+                """
+
+                assert not self.__closed and "r" in self.__mode
+
+                if not self.__extract_offsets:
+                        # If the extraction index doesn't exist, scan the
+                        # complete archive and build one.
+                        self.__find_extract_offsets()
+
+                if not pub:
+                        # Scan extract offsets index for the first instance of
+                        # any package file seen for the hash and extract it.
+                        hash_fname = os.path.join("file", fhash[:2], fhash)
+                        for name in self.__extract_offsets:
+                                if name.endswith(hash_fname):
+                                        return self.get_file(name)
+                        raise UnknownArchiveFiles(self.__arc_name, [fhash])
+
+                return self.get_file(os.path.join("publisher", pub, "file",
+                    fhash[:2], fhash))
+
+        def get_package_manifest(self, pfmri, raw=False):
+                """Returns a package manifest from the archive.
+
+                'pfmri' is the FMRI string or object identifying the package
+                manifest to extract.
+
+                'raw' is an optional boolean indicating whether the raw
+                content of the manifest should be returned.  If True, a
+                file-like object containing the content of the manifest
+                will be returned.  If False, a Manifest object will be
+                returned.
+                """
+
+                assert not self.__closed and "r" in self.__mode
+                assert pfmri
+                if isinstance(pfmri, basestring):
+                        pfmri = pkg.fmri.PkgFmri(pfmri)
+                assert pfmri.publisher
+
+                arcname = os.path.join("publisher", pfmri.publisher, "pkg",
+                    pfmri.get_dir_path())
+
+                try:
+                        fobj = self.get_file(arcname)
+                except UnknownArchiveFiles:
+                        raise UnknownPackageManifest(self.__arc_name, pfmri)
+
+                if raw:
+                        return fobj
+
+                m = pkg.manifest.Manifest(pfmri=pfmri)
+                m.set_content(content=fobj.read(), signatures=True)
+                return m
+
+        def get_publishers(self):
+                """Return a list of publisher objects for all publishers used
+                in the archive."""
+
+                if self.__pubs:
+                        return self.__pubs.values()
+
+                # If the extraction index doesn't exist, scan the complete
+                # archive and build one.
+                self.__find_extract_offsets()
+
+                # Search through offset index to find publishers
+                # in use.
+                self.__pubs = {}
+                for name in self.__extract_offsets:
+                        if name.count("/") == 1 and \
+                            name.startswith("publisher/"):
+                                ignored, pfx = name.split("/", 1)
+
+                                # See if this publisher has a .p5i file in the
+                                # archive (needed for signed packages).
+                                p5iname = os.path.join("publisher", pfx,
+                                    "pub.p5i")
+                                try:
+                                        fobj = self.get_file(p5iname)
+                                except UnknownArchiveFiles:
+                                        # No p5i; that's ok.
+                                        pub = pkg.client.publisher.Publisher(
+                                            pfx)
+                                else:
+                                        pubs = pkg.p5i.parse(fileobj=fobj)
+                                        assert len(pubs) == 1
+                                        pub = pubs[0][0]
+                                        assert pub
+
+                                self.__pubs[pfx] = pub
+
+                return self.__pubs.values()
+
+        def __cleanup(self):
+                """Private helper method to cleanup temporary files."""
+
+                try:
+                        if os.path.exists(self.__temp_dir):
+                                shutil.rmtree(self.__temp_dir)
+                except EnvironmentError, e:
+                        raise apx._convert_error(e)
+
+        def __close_fh(self):
+                """Private helper method to close filehandles."""
+
+                # Some archives may not have an index.
+                if self.__index:
+                        self.__index.close()
+                        self.__index = None
+
+                # A read error during archive load may cause these to have
+                # never been set.
+                if self.__arc_tfile:
+                        self.__arc_tfile.close()
+                        self.__arc_tfile = None
+
+                if self.__arc_file:
+                        self.__arc_file.close()
+                        self.__arc_file = None
+                self.__closed = True
+
+        def close(self, progtrack=None):
+                """If mode is 'r', this will close the archive file.  If mode is
+                'w', this will write all queued files to the archive and close
+                it.  Further operations on the archive are not possible after
+                calling this function."""
+
+                assert not self.__closed
+
+                if "w" not in self.__mode:
+                        self.__close_fh()
+                        self.__cleanup()
+                        return
+
+                # Add the standard pkg5.repository file before closing the
+                # index.
+                fobj, fname = self.__mkstemp()
+                fobj.write("[CONFIGURATION]\nversion = 4\n\n"
+                    "[publisher]\nprefix = %s\n\n"
+                    "[repository]\nversion = 4\n" % self.__default_pub)
+                fobj.close()
+                self.add(fname, arcname="pkg5.repository")
+
+                # If any publisher objects were cached, then there were
+                # signed packages present, and p5i information for each
+                # must be added to the archive so that the client can
+                # handle signing CA and intermediate certs.
+                for pub in self.__pubs.values():
+                        # A new publisher object is created with a copy of only
+                        # the information that's needed for the archive.
+                        npub = pkg.client.publisher.Publisher(pub.prefix,
+                            alias=pub.alias, ca_certs=pub.signing_ca_certs,
+                            intermediate_certs=pub.intermediate_certs,
+                            revoked_ca_certs=pub.revoked_ca_certs,
+                            approved_ca_certs=pub.approved_ca_certs)
+
+                        # Create a p5i file.
+                        fobj, fn = self.__mkstemp()
+                        pkg.p5i.write(fobj, [npub])
+                        fobj.close()
+
+                        # Queue the p5i file for addition to the archive.
+                        arcname = os.path.join("publisher", npub.prefix,
+                            "pub.p5i")
+                        self.add(fn, arcname=arcname)
+
+                # Close the index; no more entries can be added.
+                self.__index.close()
+
+                # If a tracker was provided, setup a progress goal.
+                idxbytes = 0
+                if progtrack:
+                        nfiles = len(self.__queue)
+                        nbytes = self.__queue_offset
+                        try:
+                                fs = os.stat(self.__index.pathname)
+                                nfiles += 1
+                                idxbytes = fs.st_size
+                                nbytes += idxbytes
+                        except EnvironmentError, e:
+                                raise apx._convert_error(e)
+
+                        progtrack.archive_set_goal(
+                            os.path.basename(self.__arc_name), nfiles,
+                            nbytes)
+
+                # Add the index file to the archive as the first file; it will
+                # automatically be marked with a comment identifying the index
+                # version.
+                tfile = self.__arc_tfile
+                tfile.add(self.__index.pathname, arcname=self.__idx_name)
+                if progtrack:
+                        progtrack.archive_add_progress(1, idxbytes)
+                self.__index = None
+
+                # Add all queued files to the archive.
+                while self.__queue:
+                        src, arcname = self.__queue.popleft()
+
+                        start_offset = tfile.offset
+                        tfile.add(src, arcname=arcname, recursive=False)
+
+                        # tarfile caches member information for every item
+                        # added by default, which provides fast access to the
+                        # archive contents after generation, but isn't needed
+                        # here (and uses a significant amount of memory).
+                        # Plus popping it off the stack here allows use of
+                        # the object's info to provide progress updates.
+                        ti = tfile.members.pop()
+                        if progtrack:
+                                progtrack.archive_add_progress(1,
+                                    tfile.offset - start_offset)
+                        ti.tarfile = None
+                        del ti
+
+                # Clean up temporary files.
+                self.__cleanup()
+
+                # Archive created; success!
+                if progtrack:
+                        progtrack.archive_done()
+                self.__close_fh()
+
+        @property
+        def pathname(self):
+                """The absolute path of the archive file."""
+                return self.__arc_name
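
For illustration, here is a minimal sketch of the read-side Archive API added
above.  The archive path, publisher prefix, and FMRI are hypothetical, and the
extract_package_files() signature is assumed from the excerpt above:

        import pkg.p5p

        # Open an existing archive for reading.
        arc = pkg.p5p.Archive("/tmp/example.p5p", mode="r")

        # List the publishers stored in the archive.
        for pub in arc.get_publishers():
                print pub.prefix

        # Retrieve a package manifest as a Manifest object, then
        # extract the package files its actions reference.
        m = arc.get_package_manifest("pkg://test/[email protected]")
        hashes = [a.hash for a in m.gen_actions_by_type("file")]
        arc.extract_package_files(hashes, "/tmp/outdir", pub="test")

        # Close the archive when done; any file objects returned by
        # get_file() or get_package_file() must be closed first.
        arc.close()
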
--- a/src/modules/pkgtarfile.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/pkgtarfile.py	Wed Feb 09 18:43:21 2011 -0800
@@ -21,11 +21,9 @@
 #
 
 #
-# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
-# Use is subject to license terms.
+# Copyright (c) 2007, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
-
 import os
 import stat
 import tarfile
@@ -46,10 +44,9 @@
         XXX - Push these changes upstream to Python maintainers?
         """
 
-        def __init__(self, name=None, mode="r", fileobj=None, errorlevel=2):
-
-                tarfile.TarFile.__init__(self, name, mode, fileobj)
-                self.errorlevel = errorlevel
+        def __init__(self, *args, **kwargs):
+                kwargs.setdefault("errorlevel", 2)
+                tarfile.TarFile.__init__(self, *args, **kwargs)
 
         def extract_to(self, member, path="", filename=""):
                 """Extract a member from the TarFile archive.  This
--- a/src/modules/publish/transaction.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/publish/transaction.py	Wed Feb 09 18:43:21 2011 -0800
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 """Provides a set of publishing interfaces for interacting with a pkg(5)
@@ -226,7 +226,7 @@
 
                 if create_repo:
                         try:
-                                # For compatbility reasons, assume that
+                                # For compatibility reasons, assume that
                                 # repositories created using pkgsend
                                 # should be in version 3 format (single
                                 # publisher only).
--- a/src/modules/server/repository.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/server/repository.py	Wed Feb 09 18:43:21 2011 -0800
@@ -1648,6 +1648,13 @@
                 validity.
                 """
 
+                try:
+                        if not create and self.root and \
+                            os.path.isfile(self.root):
+                                raise RepositoryInvalidError(self.root)
+                except EnvironmentError, e:
+                        raise apx._convert_error(e)
+
                 cfgpathname = None
                 if self.__cfgpathname:
                         # Use the custom configuration.
@@ -2373,6 +2380,7 @@
                                 # file created to do this is moved into place
                                 # by the insert.
                                 fd, pth = tempfile.mkstemp()
+                                os.fchmod(fd, misc.PKG_FILE_MODE)
                                 gfh = PkgGzipFile(filename=pth, mode="wb")
                                 gfh.write(s)
                                 gfh.close()
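
The os.fchmod() call above addresses the file permissions issue:
tempfile.mkstemp() creates files with mode 0600, so without it the gzip file
moved into the repository would retain that restrictive mode.  A minimal
sketch of the pattern (assuming misc.PKG_FILE_MODE is the module's standard
package file mode):

        import os
        import tempfile
        import pkg.misc as misc

        # mkstemp() creates the file with mode 0600; fchmod() applies
        # the standard package file mode before any content is written,
        # so the file never reaches the repository with the wrong
        # permissions.
        fd, pth = tempfile.mkstemp()
        os.fchmod(fd, misc.PKG_FILE_MODE)
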
--- a/src/modules/server/transaction.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/modules/server/transaction.py	Wed Feb 09 18:43:21 2011 -0800
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 import calendar
@@ -651,5 +651,7 @@
                 # Move each file to file_root, with appropriate directory
                 # structure.
                 for f in os.listdir(self.dir):
+                        if f == "append":
+                                continue
                         src_path = os.path.join(self.dir, f)
                         self.rstore.cache_store.insert(f, src_path)
--- a/src/pkg/manifests/package%2Fpkg.p5m	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/pkg/manifests/package%2Fpkg.p5m	Wed Feb 09 18:43:21 2011 -0800
@@ -18,7 +18,7 @@
 #
 # CDDL HEADER END
 #
-# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 set name=pkg.fmri value=pkg:/package/pkg@$(PKGVERS)
@@ -155,6 +155,7 @@
 file path=$(PYDIRVP)/pkg/misc.py
 file path=$(PYDIRVP)/pkg/nrlock.py
 file path=$(PYDIRVP)/pkg/p5i.py
+file path=$(PYDIRVP)/pkg/p5p.py
 file path=$(PYDIRVP)/pkg/pkggzip.py
 file path=$(PYDIRVP)/pkg/pkgsubprocess.py
 file path=$(PYDIRVP)/pkg/pkgtarfile.py
--- a/src/pkgdep.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/pkgdep.py	Wed Feb 09 18:43:21 2011 -0800
@@ -41,7 +41,7 @@
 import pkg.publish.dependencies as dependencies
 from pkg.misc import msg, emsg, PipeError
 
-CLIENT_API_VERSION = 52
+CLIENT_API_VERSION = 53
 PKG_CLIENT_NAME = "pkgdepend"
 
 DEFAULT_SUFFIX = ".res"
--- a/src/pull.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/pull.py	Wed Feb 09 18:43:21 2011 -0800
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 import calendar
@@ -33,20 +33,17 @@
 import tempfile
 import traceback
 import urllib
-import urlparse
 import warnings
 
 import pkg.catalog as catalog
 import pkg.client.progress as progress
-import pkg.config as cfg
 import pkg.fmri
 import pkg.manifest as manifest
 import pkg.client.api_errors as apx
 import pkg.client.transport.transport as transport
 import pkg.misc as misc
+import pkg.p5p
 import pkg.publish.transaction as trans
-import pkg.search_errors as search_errors
-import pkg.server.repository as sr
 import pkg.version as version
 
 from pkg.client import global_settings
@@ -88,12 +85,17 @@
 
         msg(_("""\
 Usage:
-        pkgrecv [-s src_uri] [-d (path|dest_uri)] [-c cache_dir]
+        pkgrecv [-s src_uri] [-a] [-d (path|dest_uri)] [-c cache_dir]
             [-kr] [-m match] [-n] [--raw] [--key keyfile --cert certfile] 
             (fmri|pattern) ...
         pkgrecv [-s src_repo_uri] --newest 
 
 Options:
+        -a              Store the retrieved package data in a pkg(5) archive
+                        at the location specified by -d.  The file must not
+                        already exist, and this option may only be used with
+                        filesystem-based destinations.
+
         -c cache_dir    The path to a directory that will be used to cache
                         downloaded content.  If one is not supplied, the
                         client will automatically pick a cache directory.
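
For example, a hypothetical invocation that retrieves two packages and stores
them in a new archive might look like this (the repository URI and package
names are illustrative):

        pkgrecv -s http://pkg.example.com/ -a -d /tmp/example.p5p foo quux
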
@@ -192,11 +194,11 @@
                         progresstracker = progress.CommandLineProgressTracker()
         return progresstracker
 
-def get_manifest(pfmri, basedir, contents=False):
+def get_manifest(pfmri, xport_cfg, contents=False):
 
         m = None
-        pkgdir = os.path.join(basedir, pfmri.get_dir_path())
-        mpath = os.path.join(pkgdir, "manifest")
+        pkgdir = xport_cfg.get_pkg_dir(pfmri)
+        mpath = xport_cfg.get_pkg_pathname(pfmri)
 
         if not os.path.exists(mpath):
                 m = xport.get_manifest(pfmri)
@@ -228,7 +230,6 @@
 def expand_matching_fmris(fmri_list, pfmri_strings):
         """find matching fmris using pattern matching and
         constraint auto."""
-        counthash = {}
 
         try:
                 patterns = [
@@ -238,11 +239,14 @@
         except pkg.fmri.FmriError, e:
                 abort(err=e)
 
-        return catalog.extract_matching_fmris(fmri_list,
+        matched_pats = {}
+
+        matches, unmatched = catalog.extract_matching_fmris(fmri_list,
             patterns=patterns, constraint=version.CONSTRAINT_AUTO,
-            matcher=pkg.fmri.glob_match)
+            matcher=pkg.fmri.glob_match, counthash=matched_pats)
+        return matched_pats, matches, unmatched
 
-def get_dependencies(src_uri, fmri_list, basedir, tracker):
+def get_dependencies(src_uri, fmri_list, xport_cfg, tracker):
 
         old_limit = sys.getrecursionlimit()
         # The user may be recursing 'entire' or 'redistributable'.
@@ -251,40 +255,45 @@
         s = set()
         for f in fmri_list:
                 pfmri = expand_fmri(f)
-                _get_dependencies(src_uri, s, pfmri, basedir, tracker)
+                _get_dependencies(src_uri, s, pfmri, xport_cfg, tracker)
 
         # Restore the previous default.
         sys.setrecursionlimit(old_limit)
 
         return list(s)
 
-def _get_dependencies(src_uri, s, pfmri, basedir, tracker):
+def _get_dependencies(src_uri, s, pfmri, xport_cfg, tracker):
         """Expand all dependencies."""
         tracker.evaluate_progress(fmri=pfmri)
         s.add(pfmri)
 
-        m = get_manifest(pfmri, basedir)
+        m = get_manifest(pfmri, xport_cfg)
         for a in m.gen_actions_by_type("depend"):
                 new_fmri = expand_fmri(a.attrs["fmri"])
                 if new_fmri and new_fmri not in s:
-                        _get_dependencies(src_uri, s, new_fmri, basedir,
+                        _get_dependencies(src_uri, s, new_fmri, xport_cfg,
                             tracker)
         return s
 
 def add_hashes_to_multi(mfst, multi):
-        """Takes a manifest and a multi object. Adds the hashes to the
-        multi object, returns (get_bytes, send_bytes) tuple."""
+        """Takes a manifest and a multi object. Adds the hashes to the multi
+        object, returns (get_bytes, get_files, send_bytes, send_comp_bytes)
+        tuple."""
 
         getb = 0
+        getf = 0
         sendb = 0
+        sendcb = 0
 
         for atype in ("file", "license"):
                 for a in mfst.gen_actions_by_type(atype):
                         if a.needsdata(None, None):
                                 multi.add_action(a)
                                 getb += get_pkg_otw_size(a)
+                                getf += 1
                                 sendb += int(a.attrs.get("pkg.size", 0))
-        return getb, sendb
+                                sendcb += int(a.attrs.get("pkg.csize", 0))
+        return getb, getf, sendb, sendcb
 
 def prune(fmri_list, all_versions, all_timestamps):
         """Returns a filtered version of fmri_list based on the provided
@@ -365,7 +374,9 @@
         return fmri_list
 
 def main_func():
-        global cache_dir, download_start, xport, xport_cfg, dest_xport, targ_pub
+        global cache_dir, download_start, xport, xport_cfg, dest_xport, \
+            temp_root, targ_pub
+
         all_timestamps = False
         all_versions = False
         dry_run = False
@@ -376,6 +387,7 @@
         target = None
         incoming_dir = None
         src_pub = None
+        archive = False
         raw = False
         key = None
         cert = None
@@ -389,13 +401,15 @@
         src_uri = os.environ.get("PKG_SRC", None)
 
         try:
-                opts, pargs = getopt.getopt(sys.argv[1:], "c:d:hkm:nrs:", 
-                    ["key=", "cert=", "newest", "raw"])
+                opts, pargs = getopt.getopt(sys.argv[1:], "ac:d:hkm:nrs:", 
+                    ["cert=", "key=", "newest", "raw"])
         except getopt.GetoptError, e:
                 usage(_("Illegal option -- %s") % e.opt)
 
         for opt, arg in opts:
-                if opt == "-c":
+                if opt == "-a":
+                        archive = True
+                elif opt == "-c":
                         cache_dir = arg
                 elif opt == "-d":
                         target = arg
@@ -421,7 +435,7 @@
                 elif opt == "--raw":
                         raw = True
                 elif opt == "--key":
-                        key= arg
+                        key = arg
                 elif opt == "--cert":
                         cert = arg
 
@@ -446,10 +460,10 @@
         xport_cfg.add_cache(cache_dir, readonly=False)
         xport_cfg.incoming_root = incoming_dir
 
-        # Since publication destionations may only have one repository
-        # configured per publisher, create destination as separate transport
-        # in case source and destination have identical publisher configuration
-        # but different repository endpoints.
+        # Since publication destinations may only have one repository configured
+        # per publisher, create destination as separate transport in case source
+        # and destination have identical publisher configuration but different
+        # repository endpoints.
         dest_xport, dest_xport_cfg = transport.setup_transport()
         dest_xport_cfg.add_cache(cache_dir, readonly=False)
         dest_xport_cfg.incoming_root = incoming_dir
@@ -458,7 +472,219 @@
         transport.setup_publisher(src_uri, "source", xport, xport_cfg,
             remote_prefix=True, ssl_key=key, ssl_cert=cert)
 
+        args = (pargs, target, list_newest, all_versions,
+            all_timestamps, keep_compressed, raw, recursive, dry_run,
+            dest_xport_cfg, src_uri)
+
+        if archive:
+                # Retrieving package data for archival requires a different
+                # mode of operation, so it gets its own routine.  Notably, it
+                # requires that all package data be retrieved before the
+                # archival process is started.
+                return archive_pkgs(*args)
+
+        # Normal package transfer allows operations on a per-package basis.
+        return transfer_pkgs(*args)
+
+def check_processed(any_matched, any_unmatched, total_processed):
+        # Reduce unmatched patterns to those that were unmatched for all
+        # publishers.
+        unmatched = {}
+        for pub_unmatched in any_unmatched:
+                if not pub_unmatched:
+                        # If any publisher matched all patterns, then treat
+                        # the operation as successful.
+                        unmatched = {}
+                        break
+
+                for k in pub_unmatched:
+                        unmatched.setdefault(k, set()).update(pub_unmatched[k])
+
+        for k in unmatched:
+                map(unmatched[k].discard, any_matched)
+
+        # Prune types of matching that didn't have any match failures.
+        for k, v in unmatched.items():
+                if not v:
+                        del unmatched[k]
+
+        if unmatched:
+                # If any match failures remain, abort with an error.
+                match_err = apx.InventoryException(**unmatched)
+                emsg(match_err)
+                if total_processed > 0:
+                        # Partial failure.
+                        abort(retcode=3)
+                abort()
+
+def archive_pkgs(pargs, target, list_newest, all_versions, all_timestamps,
+    keep_compressed, raw, recursive, dry_run, dest_xport_cfg, src_uri):
+        """Retrieve source package data completely and then archive it."""
+
+        global cache_dir, download_start, xport, xport_cfg, dest_xport, targ_pub
+
+        target = os.path.abspath(target)
+        if os.path.exists(target):
+                error(_("Target archive '%s' already "
+                    "exists.") % target)
+                abort()
+
+        # Open the archive early so that permissions failures, etc. can be
+        # detected before actual work is started.
+        pkg_arc = pkg.p5p.Archive(target, mode="w")
+
+        basedir = tempfile.mkdtemp(dir=temp_root,
+            prefix=global_settings.client_name + "-")
+        tmpdirs.append(basedir)
+
+        # Retrieve package data for all publishers.
         any_unmatched = []
+        any_matched = []
+        total_processed = 0
+        arc_bytes = 0
+        archive_list = []
+        for src_pub in xport_cfg.gen_publishers():
+                # Root must be per publisher on the off chance that multiple
+                # publishers have the same package.
+                xport_cfg.pkg_root = os.path.join(basedir, src_pub.prefix)
+
+                tracker = get_tracker()
+                msg(_("Retrieving packages for publisher %s ...") %
+                    src_pub.prefix)
+                if not pargs:
+                        usage(_("must specify at least one pkgfmri"))
+
+                all_fmris = fetch_catalog(src_pub, tracker, xport)
+                fmri_arguments = pargs
+                matched_pats, matches, unmatched = expand_matching_fmris(
+                    all_fmris, fmri_arguments)
+
+                # Track anything that failed to match.
+                any_unmatched.append(unmatched)
+                any_matched.extend(matched_pats.keys())
+                if not matches:
+                        # No matches at all; nothing to do for this publisher.
+                        continue
+
+                fmri_list = prune(list(set(matches)), all_versions,
+                    all_timestamps)
+
+                if recursive:
+                        msg(_("Retrieving manifests for dependency "
+                            "evaluation ..."))
+                        tracker.evaluate_start()
+                        fmri_list = prune(get_dependencies(src_uri, fmri_list,
+                            xport_cfg, tracker), all_versions, all_timestamps)
+                        tracker.evaluate_done()
+
+                def get_basename(pfmri):
+                        open_time = pfmri.get_timestamp()
+                        return "%d_%s" % \
+                            (calendar.timegm(open_time.utctimetuple()),
+                            urllib.quote(str(pfmri), ""))
+
+                # First, retrieve the manifests and calculate package transfer
+                # sizes.
+                npkgs = len(fmri_list)
+                get_bytes = 0
+                get_files = 0
+
+                if not recursive:
+                        msg(_("Retrieving and evaluating %d package(s)...") %
+                            npkgs)
+
+                tracker.evaluate_start(npkgs=npkgs)
+                skipped = False
+                retrieve_list = []
+                while fmri_list:
+                        f = fmri_list.pop()
+
+                        m = get_manifest(f, xport_cfg)
+                        pkgdir = xport_cfg.get_pkg_dir(f)
+                        mfile = xport.multi_file_ni(src_pub, pkgdir,
+                            progtrack=tracker)
+         
+                        getb, getf, arcb, arccb = add_hashes_to_multi(m, mfile)
+                        get_bytes += getb
+                        get_files += getf
+
+                        # Since files are going into the archive, progress
+                        # can be tracked in terms of compressed bytes for
+                        # the package files themselves.
+                        arc_bytes += arccb
+
+                        # Also include the manifest file itself in the
+                        # number of bytes to archive.
+                        try:
+                                fs = os.stat(m.pathname)
+                                arc_bytes += fs.st_size
+                        except EnvironmentError, e:
+                                raise apx._convert_error(e)
+
+                        retrieve_list.append((f, mfile))
+                        if not dry_run:
+                                archive_list.append((f, m.pathname, pkgdir))
+                        tracker.evaluate_progress(fmri=f)
+
+                tracker.evaluate_done()
+
+                # Next, retrieve the content for this publisher's packages.
+                tracker.download_set_goal(len(retrieve_list), get_files,
+                    get_bytes)
+
+                if dry_run:
+                        # Don't call download_done here; it would cause an
+                        # assertion failure since nothing was downloaded.
+                        # Instead, call the method that simply finishes
+                        # up the progress output.
+                        tracker.dl_output_done()
+                        cleanup()
+                        continue
+
+                processed = 0
+                while retrieve_list:
+                        f, mfile = retrieve_list.pop()
+                        tracker.download_start_pkg(f.pkg_name)
+
+                        if mfile:
+                                download_start = True
+                                mfile.wait_files()
+
+                        # Nothing more to do for this package.
+                        tracker.download_end_pkg()
+
+                tracker.download_done()
+                tracker.reset()
+
+        # Check processed patterns and abort with failure if some were
+        # unmatched.
+        check_processed(any_matched, any_unmatched, total_processed)
+
+        if dry_run:
+                # Dump all temporary data.
+                cleanup()
+                return 0
+
+        # Now archive the retrieved package data and write out the archive.
+        while archive_list:
+                pfmri, mpath, pkgdir = archive_list.pop()
+                pkg_arc.add_package(pfmri, mpath, pkgdir)
+        pkg_arc.close(progtrack=tracker)
+
+        # Dump all temporary data.
+        cleanup()
+        return 0
+
+def transfer_pkgs(pargs, target, list_newest, all_versions, all_timestamps,
+    keep_compressed, raw, recursive, dry_run, dest_xport_cfg, src_uri):
+        """Retrieve source package data and optionally republish it as each
+        package is retrieved.
+        """
+
+        global cache_dir, download_start, xport, xport_cfg, dest_xport, targ_pub
+
+        any_unmatched = []
+        any_matched = []
         total_processed = 0
         for src_pub in xport_cfg.gen_publishers():
                 tracker = get_tracker()
@@ -517,7 +743,7 @@
                                 except trans.TransactionError, e:
                                         abort(err=e)
                 else:
-                        basedir = target
+                        basedir = target = os.path.abspath(target)
                         if not os.path.exists(basedir):
                                 try:
                                         os.makedirs(basedir, misc.PKG_DIR_MODE)
@@ -530,15 +756,17 @@
                 dest_xport_cfg.pkg_root = basedir
 
                 if republish:
-                        targ_fmris = fetch_catalog(targ_pub, tracker, dest_xport)
+                        targ_fmris = fetch_catalog(targ_pub, tracker,
+                            dest_xport)
 
                 all_fmris = fetch_catalog(src_pub, tracker, xport)
                 fmri_arguments = pargs
-                matches, unmatched = expand_matching_fmris(all_fmris,
-                    fmri_arguments)
+                matched_pats, matches, unmatched = expand_matching_fmris(
+                    all_fmris, fmri_arguments)
 
                 # Track anything that failed to match.
                 any_unmatched.append(unmatched)
+                any_matched.extend(matched_pats.keys())
                 if not matches:
                         # No matches at all; nothing to do for this publisher.
                         continue
@@ -551,7 +779,7 @@
                             "evaluation ..."))
                         tracker.evaluate_start()
                         fmri_list = prune(get_dependencies(src_uri, fmri_list,
-                            basedir, tracker), all_versions, all_timestamps)
+                            xport_cfg, tracker), all_versions, all_timestamps)
                         tracker.evaluate_done()
 
                 def get_basename(pfmri):
@@ -586,14 +814,18 @@
                                 skipped = True
                                 continue
 
-                        m = get_manifest(f, basedir)
+                        m = get_manifest(f, xport_cfg)
                         pkgdir = xport_cfg.get_pkg_dir(f)
                         mfile = xport.multi_file_ni(src_pub, pkgdir,
                             not keep_compressed, tracker)
          
-                        getb, sendb = add_hashes_to_multi(m, mfile)
+                        getb, getf, sendb, sendcb = add_hashes_to_multi(m,
+                            mfile)
                         get_bytes += getb
                         if republish:
+                                # For now, normal republication always uses
+                                # uncompressed data, as already-compressed
+                                # data is not supported for publication.
                                 send_bytes += sendb
 
                         retrieve_list.append((f, mfile))
@@ -624,12 +856,12 @@
                                 tracker.republish_end_pkg()
                                 continue
 
-                        m = get_manifest(f, basedir)
+                        m = get_manifest(f, xport_cfg)
 
                         # Get first line of original manifest so that inclusion
                         # of the scheme can be determined.
                         use_scheme = True
-                        contents = get_manifest(f, basedir, contents=True)
+                        contents = get_manifest(f, xport_cfg, contents=True)
                         if contents.splitlines()[0].find("pkg:/") == -1:
                                 use_scheme = False
 
@@ -702,38 +934,9 @@
                 # Prevent further use.
                 targ_pub = None
 
-        # Find the intersection of patterns that failed to match.
-        unmatched = {}
-        for pub_unmatched in any_unmatched:
-                if not pub_unmatched:
-                        # If any publisher matched all patterns, then treat
-                        # the operation as successful.
-                        unmatched = {}
-                        break
-
-                # Otherwise, find the intersection of unmatched patterns so far.
-                for k in pub_unmatched:
-                        try:
-                                src = set(unmatched[k])
-                                unmatched[k] = \
-                                    src.intersection(pub_unmatched[k])
-                        except KeyError:
-                                # Nothing to intersect with; assign instead.
-                                unmatched[k] = pub_unmatched[k]
-
-        # Prune types of matching that didn't have any match failures.
-        for k, v in unmatched.items():
-                if not v:
-                        del unmatched[k]
-
-        if unmatched:
-                # If any match failures remain, abort with an error.
-                match_err = apx.InventoryException(**unmatched)
-                emsg(match_err)
-                if total_processed > 0:
-                        # Partial failure.
-                        abort(retcode=3)
-                abort()
+        # Check processed patterns and abort with failure if some were
+        # unmatched.
+        check_processed(any_matched, any_unmatched, total_processed)
 
         # Dump all temporary data.
         cleanup()
@@ -746,9 +949,11 @@
 
         try:
                 __ret = main_func()
-        except (pkg.actions.ActionError, trans.TransactionError,
-            RuntimeError, apx.TransportError, apx.BadRepositoryURI,
-            apx.UnsupportedRepositoryURI), _e:
+        except (KeyboardInterrupt, apx.CanceledException):
+                cleanup(True)
+                __ret = 1
+        except (pkg.actions.ActionError, trans.TransactionError, RuntimeError,
+            apx.ApiException), _e:
                 error(_e)
                 cleanup(True)
                 __ret = 1
@@ -757,9 +962,6 @@
                 # possible further broken pipe (EPIPE) errors.
                 cleanup(False)
                 __ret = 1
-        except (KeyboardInterrupt, apx.CanceledException):
-                cleanup(True)
-                __ret = 1
         except SystemExit, _e:
                 cleanup(False)
                 raise _e
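
The handler reordering above matters because Python matches except clauses
top to bottom: assuming apx.CanceledException is a subclass of the broader
apx.ApiException, listing the ApiException clause first would report a
cancellation as an error instead of exiting cleanly.  A generic sketch of the
principle using builtin exceptions (IOError subclasses EnvironmentError in
Python 2):

        try:
                raise IOError("boom")
        except IOError, e:
                # Matched here; if the EnvironmentError clause came
                # first, it would shadow this more specific handler.
                print "io error: %s" % e
        except EnvironmentError, e:
                print "environment error: %s" % e
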
--- a/src/tests/api/t_api.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/tests/api/t_api.py	Wed Feb 09 18:43:21 2011 -0800
@@ -41,7 +41,7 @@
 import time
 import unittest
 
-CLIENT_API_VERSION = 52
+CLIENT_API_VERSION = 53
 PKG_CLIENT_NAME = "pkg"
 
 class TestPkgApi(pkg5unittest.SingleDepotTestCase):
--- a/src/tests/api/t_api_list.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/tests/api/t_api_list.py	Wed Feb 09 18:43:21 2011 -0800
@@ -44,7 +44,7 @@
 import pkg.misc as misc
 import pkg.version as version
 
-CLIENT_API_VERSION = 52
+CLIENT_API_VERSION = 53
 PKG_CLIENT_NAME = "pkg"
 
 class TestApiList(pkg5unittest.ManyDepotTestCase):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/tests/api/t_p5p.py	Wed Feb 09 18:43:21 2011 -0800
@@ -0,0 +1,969 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
+#
+
+import testutils
+if __name__ == "__main__":
+        testutils.setup_environment("../../../proto")
+import pkg5unittest
+
+import difflib
+import errno
+import unittest
+import os
+import pkg.catalog
+import pkg.client.progress
+import pkg.fmri
+import pkg.misc
+import pkg.p5p
+import pkg.pkgtarfile as ptf
+import pkg.portable as portable
+import shutil
+import sys
+import tarfile as tf
+import tempfile
+
+
+class TestP5P(pkg5unittest.SingleDepotTestCase):
+        """Class to test the functionality of the pkg.p5p module."""
+
+        # Don't recreate repository and publish packages for every test.
+        persistent_setup = True
+
+        pkgs = """
+            open pkg://test/[email protected]
+            add set name=pkg.summary value="Example package foo."
+            add dir mode=0755 owner=root group=bin path=lib
+            add dir mode=0755 owner=root group=bin path=usr
+            add dir mode=0755 owner=root group=bin path=usr/bin
+            add dir mode=0755 owner=root group=bin path=usr/local
+            add dir mode=0755 owner=root group=bin path=usr/local/bin
+            add dir mode=0755 owner=root group=bin path=usr/share
+            add dir mode=0755 owner=root group=bin path=usr/share/doc
+            add dir mode=0755 owner=root group=bin path=usr/share/doc/foo
+            add dir mode=0755 owner=root group=bin path=usr/share/man
+            add dir mode=0755 owner=root group=bin path=usr/share/man/man1
+            add file tmp/foo mode=0755 owner=root group=bin path=usr/bin/foo
+            add file tmp/libfoo.so.1 mode=0755 owner=root group=bin path=lib/libfoo.so.1
+            add file tmp/foo.1 mode=0444 owner=root group=bin path=usr/share/man/man1/foo.1
+            add file tmp/README mode=0444 owner=root group=bin path=/usr/share/doc/foo/README
+            add link path=usr/local/bin/soft-foo target=usr/bin/foo
+            add hardlink path=usr/local/bin/hard-foo target=/usr/bin/foo
+            close
+            open pkg://test/[email protected]
+            add dir mode=0755 owner=root group=bin path=usr/bin
+            add set name=authorized.species value=bobcat
+            close
+            open pkg://test2/[email protected]
+            add set name=pkg.summary value="Example package quux."
+            add dir mode=0755 owner=root group=bin path=usr
+            add dir mode=0755 owner=root group=bin path=usr/bin
+            add file tmp/quux mode=0755 owner=root group=bin path=usr/bin/quux
+            close """
+
+        misc_files = ["tmp/foo", "tmp/libfoo.so.1", "tmp/foo.1", "tmp/README",
+            "tmp/LICENSE", "tmp/quux"]
+
+        def seed_ta_dir(self, certs, dest_dir=None):
+                if isinstance(certs, basestring):
+                        certs = [certs]
+                if not dest_dir:
+                        dest_dir = self.ta_dir
+                self.assert_(dest_dir)
+                self.assert_(self.raw_trust_anchor_dir)
+                for c in certs:
+                        name = "%s_cert.pem" % c
+                        portable.copyfile(
+                            os.path.join(self.raw_trust_anchor_dir, name),
+                            os.path.join(dest_dir, name))
+
+        def image_create(self, *args, **kwargs):
+                pkg5unittest.SingleDepotTestCase.image_create(self,
+                    *args, **kwargs)
+                self.ta_dir = os.path.join(self.img_path, "etc/certs/CA")
+                os.makedirs(self.ta_dir)
+
+        def setUp(self):
+                pkg5unittest.SingleDepotTestCase.setUp(self)
+                self.make_misc_files(self.misc_files)
+
+                # Setup base test paths.
+                self.path_to_certs = os.path.join(self.ro_data_root,
+                    "signing_certs", "produced")
+                self.keys_dir = os.path.join(self.path_to_certs, "keys")
+                self.cs_dir = os.path.join(self.path_to_certs,
+                    "code_signing_certs")
+                self.chain_certs_dir = os.path.join(self.path_to_certs,
+                    "chain_certs")
+                self.pub_cas_dir = os.path.join(self.path_to_certs,
+                    "publisher_cas")
+                self.inter_certs_dir = os.path.join(self.path_to_certs,
+                    "inter_certs")
+                self.raw_trust_anchor_dir = os.path.join(self.path_to_certs,
+                    "trust_anchors")
+                self.crl_dir = os.path.join(self.path_to_certs, "crl")
+                self.ta_dir = None
+
+                # Publish packages needed for tests.
+                plist = self.pkgsend_bulk(self.rurl, self.pkgs)
+
+                # Stash published package FMRIs away for easy access by tests.
+                self.foo = pkg.fmri.PkgFmri(plist[0])
+                self.signed = pkg.fmri.PkgFmri(plist[1])
+                self.quux = pkg.fmri.PkgFmri(plist[2])
+
+                # Sign the 'signed' package.
+                r = self.get_repo(self.dcs[1].get_repodir())
+                r.add_signing_certs([os.path.join(self.pub_cas_dir,
+                    "pubCA1_ta1_cert.pem")], ca=True)
+                r.add_signing_certs([os.path.join(self.pub_cas_dir,
+                    "pubCA1_ta3_cert.pem")], ca=True)
+                r.add_signing_certs([os.path.join(self.inter_certs_dir,
+                    "i1_ta1_cert.pem")], ca=False)
+                r.add_signing_certs([os.path.join(self.inter_certs_dir,
+                    "i2_ta1_cert.pem")], ca=False)
+
+                sign_args = "-k %(key)s -c %(cert)s -i %(i1)s -i %(i2)s " \
+                    "%(pkg)s" % {
+                    "key": os.path.join(self.keys_dir, "cs1_pubCA1_key.pem"),
+                    "cert": os.path.join(self.cs_dir, "cs1_pubCA1_cert.pem"),
+                    "i1": os.path.join(self.chain_certs_dir,
+                        "ch1_pubCA1_cert.pem"),
+                    "i2": os.path.join(self.chain_certs_dir,
+                        "ch2_pubCA1_cert.pem"),
+                    "pkg": self.signed
+                }
+                self.pkgsign(self.rurl, sign_args)
+
+                # This is just a test assertion to verify that the package
+                # was signed as expected.
+                self.image_create(self.rurl)
+                self.seed_ta_dir("ta1")
+                self.pkg("set-property signature-policy verify")
+                self.pkg("install signed")
+                self.image_destroy()
+
+                # Expected list of archive members for archive containing foo.
+                self.foo_expected = [
+                    "pkg5.index.0.gz",
+                    "publisher",
+                    "publisher/test",
+                    "publisher/test/pkg",
+                    "publisher/test/pkg/foo",
+                    "publisher/test/pkg/%s" % self.foo.get_dir_path(),
+                    "publisher/test/file",
+                    "publisher/test/file/b2",
+                    "publisher/test/file/b2/b265f2ec87c4a55eb2b6b4c926e7c65f7247a27e",
+                    "publisher/test/file/a2",
+                    "publisher/test/file/a2/a285ada5f3cae14ea00e97a8d99bd3e357cb0dca",
+                    "publisher/test/file/0a",
+                    "publisher/test/file/0a/0acf1107d31f3bab406f8611b21b8fade78ac874",
+                    "publisher/test/file/dc",
+                    "publisher/test/file/dc/dc84bd4b606fe43fc892eb245d9602b67f8cba38",
+                    "pkg5.repository",
+                ]
+
+                # Expected list of archive members for archive containing foo
+                # and quux (sorted).
+                self.multi_expected = [
+                    "pkg5.index.0.gz",
+                    "pkg5.repository",
+                    "publisher",
+                    "publisher/test",
+                    "publisher/test/file",
+                    "publisher/test/file/0a",
+                    "publisher/test/file/0a/0acf1107d31f3bab406f8611b21b8fade78ac874",
+                    "publisher/test/file/4e",
+                    "publisher/test/file/4e/4ee36c0da7f97dd367d36095c4fcac014f8b2ec4",
+                    "publisher/test/file/6f",
+                    "publisher/test/file/6f/6fadc7b6ba9db7705b3833416447ba5daebe1478",
+                    "publisher/test/file/93",
+                    "publisher/test/file/93/93911122ac112e5e4cbcbe95d81c9cc5299e239c",
+                    "publisher/test/file/96",
+                    "publisher/test/file/96/965dad893f0173a84cf112e7160aa9a2dc8ec2d3",
+                    "publisher/test/file/a2",
+                    "publisher/test/file/a2/a285ada5f3cae14ea00e97a8d99bd3e357cb0dca",
+                    "publisher/test/file/b2",
+                    "publisher/test/file/b2/b265f2ec87c4a55eb2b6b4c926e7c65f7247a27e",
+                    "publisher/test/file/d4",
+                    "publisher/test/file/d4/d43daefacfba6d0c178dd37178e650bb606936f4",
+                    "publisher/test/file/d8",
+                    "publisher/test/file/d8/d851faf1de264d9a04527cc4b7fd2e2daef2cfdd",
+                    "publisher/test/file/dc",
+                    "publisher/test/file/dc/dc84bd4b606fe43fc892eb245d9602b67f8cba38",
+                    "publisher/test/file/e2",
+                    "publisher/test/file/e2/e2c1fd4b0d66150de3d2b28195603c69dfd792e8",
+                    "publisher/test/pkg",
+                    "publisher/test/pkg/foo",
+                    "publisher/test/pkg/%s" % self.foo.get_dir_path(),
+                    "publisher/test/pkg/signed",
+                    "publisher/test/pkg/%s" % self.signed.get_dir_path(),
+                    "publisher/test/pub.p5i",
+                    "publisher/test2",
+                    "publisher/test2/file",
+                    "publisher/test2/file/80",
+                    "publisher/test2/file/80/801eebbfe8c526bf092d98741d4228e4d0fc99ae",
+                    "publisher/test2/pkg",
+                    "publisher/test2/pkg/quux",
+                    "publisher/test2/pkg/%s" % self.quux.get_dir_path(),
+                ]
+
+        def test_00_create(self):
+                """Verify that archive creation works as expected."""
+
+                # Verify that an empty package archive can be created and that
+                # the resulting archive is of the correct type.
+                arc_path = os.path.join(self.test_root, "empty.p5p")
+                arc = pkg.p5p.Archive(arc_path, mode="w")
+                self.assertEqual(arc.pathname, arc_path)
+                arc.close()
+
+                # Verify archive exists and use the tarfile module to read the
+                # archive so that the implementation can be verified.
+                assert os.path.exists(arc_path)
+                arc = ptf.PkgTarFile(name=arc_path, mode="r")
+                fm = arc.firstmember
+                self.assertEqual(fm.name, "pkg5.index.0.gz")
+                comment = fm.pax_headers.get("comment", "")
+                self.assertEqual(comment, "pkg5.archive.version.0")
+
+                # Verify basic expected content exists.
+                expected = ["pkg5.index.0.gz", "publisher", "pkg5.repository"]
+                actual = [m.name for m in arc.getmembers()]
+                self.assertEqualDiff(expected, actual)
+
+                # Destroy the archive.
+                os.unlink(arc_path)
+
+        def test_01_add(self):
+                """Verify that add() works as expected."""
+
+                # Prep the archive.
+                arc_path = os.path.join(self.test_root, "add.p5p")
+                arc = pkg.p5p.Archive(arc_path, mode="w")
+
+                # add() permits addition of arbitrary files (intentionally);
+                # it is also the routine that higher-level functions use
+                # internally to add package content.  Because of that, this
+                # function does not strictly need standalone testing, but it
+                # helps ensure all code paths for add() are tested.
+                arc.add(self.test_root)
+                tmp_root = os.path.join(self.test_root, "tmp")
+                arc.add(tmp_root)
+
+                for f in self.misc_files:
+                        src = os.path.join(self.test_root, f)
+
+                        # Ensure files are in read-only mode so that file
+                        # permission normalization can be tested.
+                        os.chmod(src, pkg.misc.PKG_RO_FILE_MODE)
+                        arc.add(src)
+
+                # Write out archive.
+                arc.close()
+
+                # Now open the archive and iterate through its contents and
+                # verify that each member has the expected characteristics.
+                arc = ptf.PkgTarFile(name=arc_path, mode="r")
+
+                members = [m for m in arc.getmembers()]
+
+                # There should be 11 members in all, including the package
+                # archive index and three directories.
+                actual = [m.name for m in members]
+                self.assertEqual(len(actual), 11)
+                expected = ["pkg5.index.0.gz", "publisher",
+                    pkg.misc.relpath(self.test_root, "/"),
+                    pkg.misc.relpath(tmp_root, "/")
+                ]
+                expected.extend(
+                    pkg.misc.relpath(os.path.join(self.test_root, e), "/")
+                    for e in self.misc_files
+                )
+                expected.append("pkg5.repository")
+                self.assertEqualDiff(expected, actual)
+
+                for member in members:
+                        # Every archive member should be a file or a
+                        # directory.
+                        self.assert_(member.isreg() or member.isdir())
+
+                        if member.name == "pkg5.index.0.gz":
+                                assert member.isreg()
+                                comment = member.pax_headers.get("comment", "")
+                                self.assertEqual(comment,
+                                    "pkg5.archive.version.0")
+                                continue
+
+                        if member.isdir():
+                                # Verify directories were added with expected
+                                # mode.
+                                self.assertEqual(oct(member.mode),
+                                    oct(pkg.misc.PKG_DIR_MODE))
+                        elif member.isfile():
+                                # Verify files were added with expected mode.
+                                self.assertEqual(oct(member.mode),
+                                    oct(pkg.misc.PKG_FILE_MODE))
+
+                        # Verify files and directories have expected ownership.
+                        self.assertEqual(member.uname, "root")
+                        self.assertEqual(member.gname, "root")
+                        self.assertEqual(member.uid, 0)
+                        self.assertEqual(member.gid, 0)
+
+                os.unlink(arc_path)
+
+        def test_02_add_package(self):
+                """Verify that pkg(5) archive creation using add_package() works
+                as expected.
+                """
+
+                # Get repository.
+                repo = self.get_repo(self.dc.get_repodir())
+
+                # Create a directory and copy package files from repository to
+                # it (this is how pkgrecv stores content during republication
+                # or when using --raw).
+                dfroot = os.path.join(self.test_root, "pfiles")
+                os.mkdir(dfroot, pkg.misc.PKG_DIR_MODE)
+
+                foo_path = os.path.join(dfroot, "foo.p5m")
+                portable.copyfile(repo.manifest(self.foo), foo_path)
+
+                signed_path = os.path.join(dfroot, "signed.p5m")
+                portable.copyfile(repo.manifest(self.signed), signed_path)
+
+                quux_path = os.path.join(dfroot, "quux.p5m")
+                portable.copyfile(repo.manifest(self.quux), quux_path)
+
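+                # (The repository stores package files by content hash in a
+                # two-level hierarchy, e.g. file/b2/b265f2ec...; the walk
+                # below flattens those subdirectories into dfroot.)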
+                for rstore in repo.rstores:
+                        for dirpath, dirnames, filenames in os.walk(
+                            rstore.file_root):
+                                if not filenames:
+                                        continue
+                                for f in filenames:
+                                        portable.copyfile(
+                                            os.path.join(dirpath, f),
+                                            os.path.join(dfroot, f))
+
+                # Prep the archive.
+                progtrack = pkg.client.progress.QuietProgressTracker()
+                arc_path = os.path.join(self.test_root, "add_package.p5p")
+                arc = pkg.p5p.Archive(arc_path, mode="w")
+
+                # Create an archive with just one package.
+                arc.add_package(self.foo, foo_path, dfroot)
+                arc.close(progtrack=progtrack)
+
+                # Verify the result.
+                arc = ptf.PkgTarFile(name=arc_path, mode="r")
+                expected = self.foo_expected
+                actual = [m.name for m in arc.getmembers()]
+                self.assertEqualDiff(expected, actual)
+
+                # Prep a new archive.
+                os.unlink(arc_path)
+                arc = pkg.p5p.Archive(arc_path, mode="w")
+
+                # Create an archive with multiple packages.
+                # (Don't use progtrack this time.)
+                arc.add_package(self.foo, foo_path, dfroot)
+                arc.add_package(self.signed, signed_path, dfroot)
+                arc.add_package(self.quux, quux_path, dfroot)
+
+                # Add publisher cert files.  (This has to be done manually
+                # since add_package() was used instead of add_repo_package().)
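+                # (The final boolean argument indicates whether the certs
+                # being added are CA certs (True) or intermediate certs
+                # (False).)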
+                p = repo.get_publisher("test")
+                arc.add_signing_certs(p.prefix, [
+                    repo.file(hsh, pub=p.prefix)
+                    for hsh in p.signing_ca_certs
+                ], True)
+                arc.add_signing_certs(p.prefix, [
+                    repo.file(hsh, pub=p.prefix)
+                    for hsh in p.intermediate_certs
+                ], False)
+                arc.close()
+
+                # Verify the result.
+                arc = ptf.PkgTarFile(name=arc_path, mode="r")
+                expected = self.multi_expected
+                actual = sorted(m.name for m in arc.getmembers())
+                self.assertEqualDiff(expected, actual)
+
+                os.unlink(arc_path)
+                os.unlink(foo_path)
+                os.unlink(quux_path)
+                os.unlink(signed_path)
+
+        def test_03_add_repo_package(self):
+                """Verify that pkg(5) archive creation using add_repo_package()
+                works as expected.
+                """
+
+                progtrack = pkg.client.progress.QuietProgressTracker()
+
+                # Get repository.
+                repo = self.get_repo(self.dc.get_repodir())
+
+                # Create an archive with just one package.
+                arc_path = os.path.join(self.test_root, "add_repo_package.p5p")
+                arc = pkg.p5p.Archive(arc_path, mode="w")
+                arc.add_repo_package(self.foo, repo)
+                arc.close(progtrack=progtrack)
+
+                # Verify the result.
+                arc = ptf.PkgTarFile(name=arc_path, mode="r")
+                expected = self.foo_expected
+                actual = [m.name for m in arc.getmembers()]
+                self.assertEqualDiff(expected, actual)
+
+                # Prep a new archive.
+                os.unlink(arc_path)
+                arc = pkg.p5p.Archive(arc_path, mode="w")
+
+                # Create an archive with multiple packages.
+                # (Don't use progtrack this time.)
+                arc.add_repo_package(self.foo, repo)
+                arc.add_repo_package(self.signed, repo)
+                arc.add_repo_package(self.quux, repo)
+                arc.close()
+
+                # Verify the result.
+                arc = ptf.PkgTarFile(name=arc_path, mode="r")
+                expected = self.multi_expected
+                actual = sorted(m.name for m in arc.getmembers())
+                self.assertEqualDiff(expected, actual)
+
+                os.unlink(arc_path)
+
+        def __verify_manifest_sig(self, repo, pfmri, content):
+                """Helper method to verify that the given manifest signature
+                data matches that of the corresponding manifest in a repository.
+                """
+
+                sm = pkg.manifest.Manifest(pfmri=pfmri)
+                sm.set_content(pathname=repo.manifest(pfmri), signatures=True)
+
+                if isinstance(content, basestring):
+                        dm = pkg.manifest.Manifest()
+                        dm.set_content(content=content, signatures=True)
+                else:
+                        dm = content
+                self.assertEqualDiff(sm.signatures, dm.signatures)
+
+        def __verify_manifest_file_sig(self, repo, pfmri, target):
+                """Helper method to verify that target manifest's signature data
+                matches that of the corresponding manifest in a repository.
+                """
+
+                sm = pkg.manifest.Manifest(pfmri=pfmri)
+                sm.set_content(pathname=repo.manifest(pfmri), signatures=True)
+
+                dm = pkg.manifest.Manifest()
+                dm.set_content(pathname=target, signatures=True)
+                self.assertEqualDiff(sm.signatures, dm.signatures)
+
+        def __verify_extract(self, repo, arc_path, hashes, ext_dir):
+                """Helper method to test extraction and retrieval functionality.
+                """
+
+                arc = pkg.p5p.Archive(arc_path, mode="r")
+
+                #
+                # Verify behaviour of extract_package_manifest().
+                #
+
+                # Test bad FMRI.
+                self.assertRaises(pkg.fmri.IllegalFmri,
+                    arc.extract_package_manifest, "pkg:/^[email protected],5.11",
+                    ext_dir)
+
+                # Test unqualified (no publisher) FMRI.
+                self.assertRaises(AssertionError,
+                    arc.extract_package_manifest, "pkg:/[email protected],5.11",
+                    ext_dir)
+
+                # Test unknown FMRI.
+                self.assertRaisesStringify(pkg.p5p.UnknownPackageManifest,
+                    arc.extract_package_manifest, "pkg://test/[email protected],5.11",
+                    ext_dir)
+
+                # Test extraction when not specifying filename.
+                fpath = os.path.join(ext_dir, self.foo.get_dir_path())
+                arc.extract_package_manifest(self.foo, ext_dir)
+                self.__verify_manifest_file_sig(repo, self.foo, fpath)
+
+                # Test extraction specifying directory that does not exist.
+                shutil.rmtree(ext_dir)
+                arc.extract_package_manifest(self.foo, ext_dir,
+                    filename="foo.p5m")
+                self.__verify_manifest_file_sig(repo, self.foo,
+                    os.path.join(ext_dir, "foo.p5m"))
+
+                # Test extraction specifying directory that already exists.
+                arc.extract_package_manifest(self.quux, ext_dir,
+                    filename="quux.p5m")
+                self.__verify_manifest_file_sig(repo, self.quux,
+                    os.path.join(ext_dir, "quux.p5m"))
+
+                # Test extraction in the case that manifest already exists.
+                arc.extract_package_manifest(self.quux, ext_dir,
+                    filename="quux.p5m")
+                self.__verify_manifest_file_sig(repo, self.quux,
+                    os.path.join(ext_dir, "quux.p5m"))
+
+                #
+                # Verify behaviour of extract_package_files().
+                #
+                arc.close()
+                arc = pkg.p5p.Archive(arc_path, mode="r")
+                shutil.rmtree(ext_dir)
+
+                # Test unknown hashes.
+                self.assertRaisesStringify(pkg.p5p.UnknownArchiveFiles,
+                    arc.extract_package_files, ["a", "b", "c"], ext_dir)
+
+                # Test extraction specifying directory that does not exist.
+                arc.extract_package_files(hashes["all"], ext_dir)
+                for h in hashes["all"]:
+                        fpath = os.path.join(ext_dir, h)
+                        assert os.path.exists(fpath)
+
+                        # Now change mode to readonly.
+                        os.chmod(fpath, pkg.misc.PKG_RO_FILE_MODE)
+
+                # Test extraction in the case that files already exist
+                # (and those files are readonly).
+                arc.extract_package_files(hashes["all"], ext_dir)
+                for h in hashes["all"]:
+                        assert os.path.exists(os.path.join(ext_dir, h))
+
+                # Test extraction when publisher is specified.
+                shutil.rmtree(ext_dir)
+                arc.extract_package_files(hashes["test"], ext_dir, pub="test")
+                for h in hashes["test"]:
+                        assert os.path.exists(os.path.join(ext_dir, h))
+
+                #
+                # Verify behaviour of extract_to().
+                #
+                arc.close()
+                arc = pkg.p5p.Archive(arc_path, mode="r")
+                shutil.rmtree(ext_dir)
+
+                # Test unknown file.
+                self.assertRaisesStringify(pkg.p5p.UnknownArchiveFiles,
+                    arc.extract_to, "no/such/file", ext_dir)
+
+                # Test extraction when not specifying filename (the archive
+                # member should be extracted into the target directory using
+                # its full path within the archive; that is, the target
+                # dir is prepended).
+                for pub in hashes:
+                        if pub == "all":
+                                continue
+                        for h in hashes[pub]:
+                                arcname = os.path.join("publisher", pub, "file",
+                                    h[:2], h)
+                                arc.extract_to(arcname, ext_dir)
+
+                                fpath = os.path.join(ext_dir, arcname)
+                                assert os.path.exists(fpath)
+
+                # Test extraction specifying directory that does not exist.
+                shutil.rmtree(ext_dir)
+                for pub in hashes:
+                        if pub == "all":
+                                continue
+                        for h in hashes[pub]:
+                                arcname = os.path.join("publisher", pub, "file",
+                                    h[:2], h)
+                                arc.extract_to(arcname, ext_dir, filename=h)
+
+                                fpath = os.path.join(ext_dir, h)
+                                assert os.path.exists(fpath)
+
+                                # Now change mode to readonly.
+                                os.chmod(fpath, pkg.misc.PKG_RO_FILE_MODE)
+
+                # Test extraction in the case that files already exist
+                # (and those files are readonly).
+                for pub in hashes:
+                        if pub == "all":
+                                continue
+                        for h in hashes[pub]:
+                                arcname = os.path.join("publisher", pub, "file",
+                                    h[:2], h)
+                                arc.extract_to(arcname, ext_dir, filename=h)
+
+                                fpath = os.path.join(ext_dir, h)
+                                assert os.path.exists(fpath)
+
+                #
+                # Verify behaviour of get_file().
+                #
+                arc.close()
+                arc = pkg.p5p.Archive(arc_path, mode="r")
+
+                # Test behaviour for non-existent file.
+                self.assertRaisesStringify(pkg.p5p.UnknownArchiveFiles,
+                    arc.get_file, "no/such/file")
+
+                # Test that archived content retrieved is identical.
+                arcname = os.path.join("publisher", self.foo.publisher, "pkg",
+                    self.foo.get_dir_path())
+                fobj = arc.get_file(arcname)
+                self.__verify_manifest_sig(repo, self.foo, fobj.read())
+                fobj.close()
+
+                #
+                # Verify behaviour of get_package_file().
+                #
+                arc.close()
+                arc = pkg.p5p.Archive(arc_path, mode="r")
+
+                # Test behaviour when specifying publisher.
+                nullf = open(os.devnull, "wb")
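+                # (Package files are stored gzip-compressed under their
+                # content hash; gunzip_from_stream() writes the decompressed
+                # data to nullf and returns the hash of that data, which
+                # should match the name the file is stored under.)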
+                for h in hashes["test"]:
+                        fobj = arc.get_package_file(h, pub="test")
+                        uchash = pkg.misc.gunzip_from_stream(fobj, nullf)
+                        self.assertEqual(uchash, h)
+                        fobj.close()
+
+                # Test behaviour when not specifying publisher.
+                for h in hashes["test"]:
+                        fobj = arc.get_package_file(h)
+                        uchash = pkg.misc.gunzip_from_stream(fobj, nullf)
+                        self.assertEqual(uchash, h)
+                        fobj.close()
+
+                #
+                # Verify behaviour of get_package_manifest().
+                #
+                arc.close()
+                arc = pkg.p5p.Archive(arc_path, mode="r")
+
+                # Test bad FMRI.
+                self.assertRaises(pkg.fmri.IllegalFmri,
+                    arc.get_package_manifest, "pkg:/^[email protected],5.11")
+
+                # Test unqualified (no publisher) FMRI.
+                self.assertRaises(AssertionError,
+                    arc.get_package_manifest, "pkg:/[email protected],5.11")
+
+                # Test unknown FMRI.
+                self.assertRaisesStringify(pkg.p5p.UnknownPackageManifest,
+                    arc.get_package_manifest, "pkg://test/[email protected],5.11")
+
+                # Test that archived content retrieved is identical.
+                mobj = arc.get_package_manifest(self.foo)
+                self.__verify_manifest_sig(repo, self.foo, mobj)
+
+                mobj = arc.get_package_manifest(self.signed)
+                self.__verify_manifest_sig(repo, self.signed, mobj)
+
+                #
+                # Verify behaviour of extract_catalog1().
+                #
+                arc.close()
+                arc = pkg.p5p.Archive(arc_path, mode="r")
+                ext_tmp_dir = tempfile.mkdtemp(dir=self.test_root)
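+                # (A v1 catalog consists of catalog.attrs plus one or more
+                # part files such as catalog.base.C; parts that would be
+                # empty are omitted, which is why catalog.dependency.C may
+                # be missing here.)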
+                def verify_catalog(pub, pfmris):
+                        for pname in ("catalog.attrs", "catalog.base.C",
+                            "catalog.dependency.C", "catalog.summary.C"):
+                                expected = os.path.join(ext_tmp_dir, pname)
+                                try:
+                                        arc.extract_catalog1(pname, ext_tmp_dir,
+                                            pub=pub)
+                                except pkg.p5p.UnknownArchiveFiles:
+                                        if pname == "catalog.dependency.C":
+                                                # No dependencies, so the
+                                                # exception is expected only
+                                                # for this part.
+                                                continue
+                                        raise
+
+                                assert os.path.exists(expected)
+
+                        cat = pkg.catalog.Catalog(meta_root=ext_tmp_dir)
+                        self.assertEqual([f for f in cat.fmris()], pfmris)
+
+                verify_catalog("test", [self.foo, self.signed])
+                shutil.rmtree(ext_tmp_dir)
+                os.mkdir(ext_tmp_dir)
+
+                verify_catalog("test2", [self.quux])
+                shutil.rmtree(ext_tmp_dir)
+                return arc
+
+        def test_04_extract(self):
+                """Verify that pkg(5) archive extraction methods work as
+                expected.
+                """
+
+                # Get repository.
+                repo = self.get_repo(self.dc.get_repodir())
+
+                # Create an archive with a few packages.
+                arc_path = os.path.join(self.test_root, "retrieve.p5p")
+                arc = pkg.p5p.Archive(arc_path, mode="w")
+                arc.add_repo_package(self.foo, repo)
+                arc.add_repo_package(self.signed, repo)
+                arc.add_repo_package(self.quux, repo)
+                arc.close()
+
+                # Get list of file hashes.
+                hashes = { "all": set() }
+                for rstore in repo.rstores:
+                        for dirpath, dirnames, filenames in os.walk(
+                            rstore.file_root):
+                                if not filenames:
+                                        continue
+                                hashes["all"].update(filenames)
+                                hashes.setdefault(rstore.publisher,
+                                    set()).update(filenames)
+
+                # Extraction directory for testing.
+                ext_dir = os.path.join(self.test_root, "extracted")
+
+                # First, verify behaviour using archive created using
+                # pkg(5) archive class.
+                arc = self.__verify_extract(repo, arc_path, hashes, ext_dir)
+                arc.close()
+
+                # Now extract everything from the archive, create a new
+                # archive using the plain tarfile class, and verify that
+                # the pkg(5) archive class can still extract and access
+                # the contents as expected even though the index file
+                # isn't marked with the appropriate pax headers (the
+                # index should be ignored, since it is also invalid).
+                shutil.rmtree(ext_dir)
+
+                # Extract all of the existing content.
+                arc = ptf.PkgTarFile(name=arc_path, mode="r")
+                arc.extractall(ext_dir)
+                arc.close()
+
+                # Create a new archive.
+                os.unlink(arc_path)
+                arc = ptf.PkgTarFile(name=arc_path, mode="w")
+
+                def add_entry(src):
+                        fpath = os.path.join(dirpath, src)
+                        arcname = pkg.misc.relpath(fpath, ext_dir)
+                        arc.add(name=fpath, arcname=arcname,
+                            recursive=False)
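+                # (add_entry() resolves dirpath and arc from the enclosing
+                # scope at call time, so it can be reused by the second
+                # os.walk() pass further below.)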
+
+                for dirpath, dirnames, filenames in os.walk(ext_dir):
+                        map(add_entry, filenames)
+                        map(add_entry, dirnames)
+                arc.close()
+
+                # Verify that archive has expected contents.
+                arc = ptf.PkgTarFile(name=arc_path, mode="r")
+                expected = self.multi_expected
+                actual = sorted(m.name for m in arc.getmembers())
+                self.assertEqualDiff(expected, actual)
+                arc.close()
+
+                # Verify pkg(5) archive class extraction behaviour using
+                # the new archive.
+                arc = self.__verify_extract(repo, arc_path, hashes, ext_dir)
+                arc.close()
+
+                # Extract all of the existing content.
+                arc = ptf.PkgTarFile(name=arc_path, mode="r")
+                arc.extractall(ext_dir)
+                arc.close()
+
+                # Now verify the archive can still be used when the index
+                # file is omitted.
+                os.unlink(arc_path)
+                arc = ptf.PkgTarFile(name=arc_path, mode="w")
+                for dirpath, dirnames, filenames in os.walk(ext_dir):
+                        map(add_entry,
+                            [f for f in filenames if f != "pkg5.index.0.gz"])
+                        map(add_entry, dirnames)
+                arc.close()
+
+                # Verify pkg(5) archive class extraction behaviour using
+                # the new archive.
+                arc = self.__verify_extract(repo, arc_path, hashes, ext_dir)
+                arc.close()
+
+        def test_05_invalid(self):
+                """Verify that pkg(5) archive class handles broken archives
+                and items that aren't archives as expected."""
+
+                arc_path = os.path.join(self.test_root, "nosucharchive.p5p")
+
+                #
+                # Check that a missing archive file is handled.
+                #
+                self.assertRaisesStringify(pkg.p5p.InvalidArchive,
+                    pkg.p5p.Archive, arc_path, mode="r")
+
+                #
+                # Check that an empty archive file is handled.
+                #
+                arc_path = os.path.join(self.test_root, "retrieve.p5p")
+                open(arc_path, "wb").close()
+                self.assertRaisesStringify(pkg.p5p.InvalidArchive,
+                    pkg.p5p.Archive, arc_path, mode="r")
+                os.unlink(arc_path)
+
+                #
+                # Check that an invalid archive file is handled.
+                #
+                with open(arc_path, "wb") as f:
+                        f.write("not_a_valid_archive")
+                self.assertRaisesStringify(pkg.p5p.InvalidArchive,
+                    pkg.p5p.Archive, arc_path, mode="r")
+                os.unlink(arc_path)
+
+                #
+                # Create a valid archive to use for the truncation and
+                # corruption tests below.
+                #
+                repo = self.get_repo(self.dc.get_repodir())
+                arc = pkg.p5p.Archive(arc_path, mode="w")
+                arc.add_repo_package(self.foo, repo)
+                arc.add_repo_package(self.signed, repo)
+                arc.add_repo_package(self.quux, repo)
+                arc.close()
+
+                #
+                # Check that truncated archives or archives with invalid
+                # indexes are handled as expected.
+                #
+
+                # Determine where to truncate the archive by looking for a
+                # specific package file and then setting the truncation
+                # point to halfway through that file's data.
+                arc = ptf.PkgTarFile(name=arc_path, mode="r")
+                idx_data_offset = 0
+                src_offset = 0
+                src_bytes = 0
+                dest_offset = 0
+                trunc_sz = 0
+                src_fhash = "b265f2ec87c4a55eb2b6b4c926e7c65f7247a27e"
+                dest_fhash = "801eebbfe8c526bf092d98741d4228e4d0fc99ae"
+                for m in arc.getmembers():
+                        if m.name.endswith("/" + dest_fhash):
+                                dest_offset = m.offset
+                                trunc_sz = m.offset_data + int(m.size / 2)
+                        elif m.name.endswith("pkg5.index.0.gz"):
+                                idx_data_offset = m.offset_data
+                        elif m.name.endswith("/" + src_fhash):
+                                # Calculate size of source entry.
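+                                # (The entry spans its header,
+                                # offset_data - offset bytes, plus its
+                                # data padded to a whole number of
+                                # tf.BLOCKSIZE blocks.)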
+                                src_bytes = m.offset_data - m.offset
+                                blocks, rem = divmod(m.size, tf.BLOCKSIZE)
+                                if rem > 0:
+                                        blocks += 1
+                                src_bytes += blocks * tf.BLOCKSIZE
+                                src_offset = m.offset
+
+                arc.close()
+
+                # Test truncated archive case.
+                bad_arc_path = os.path.join(self.test_root, "bad_arc.p5p")
+                portable.copyfile(arc_path, bad_arc_path)
+
+                self.debug("%s size: %d truncate: %d" % (arc_path,
+                    os.stat(arc_path).st_size, trunc_sz))
+                with open(bad_arc_path, "ab+") as f:
+                        f.truncate(trunc_sz)
+
+                ext_dir = os.path.join(self.test_root, "extracted")
+                shutil.rmtree(ext_dir, True)
+                arc = pkg.p5p.Archive(bad_arc_path, mode="r")
+                self.assertRaisesStringify(pkg.p5p.CorruptArchiveFiles,
+                    arc.extract_package_files, [dest_fhash], ext_dir,
+                    pub="test2")
+                arc.close()
+
+                # Test archive with invalid index; do this by writing some bogus
+                # bytes into the data area for the index.
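+                # The bogus bytes replace an equal number of bytes at the
+                # start of the index data, so the archive's size and the
+                # offsets of all later members are preserved; only the
+                # index content is corrupted.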
+                portable.copyfile(arc_path, bad_arc_path)
+                with open(bad_arc_path, "ab+") as dest:
+                        dest.seek(idx_data_offset)
+                        dest.truncate()
+                        with open(arc_path, "rb") as src:
+                                bogus_data = "invalid_index_data"
+                                dest.write(bogus_data)
+                                src.seek(idx_data_offset + len(bogus_data))
+                                dest.write(src.read())
+
+                shutil.rmtree(ext_dir, True)
+                self.assertRaisesStringify(pkg.p5p.InvalidArchive,
+                    pkg.p5p.Archive, bad_arc_path, mode="r")
+
+                # Test archive with invalid index offsets; do this by truncating
+                # an existing archive at the offset of one of its files and then
+                # appending the data for a different archive member in its
+                # place.
+                portable.copyfile(arc_path, bad_arc_path)
+                with open(bad_arc_path, "ab+") as dest:
+                        dest.seek(dest_offset)
+                        dest.truncate()
+                        with open(arc_path, "rb") as src:
+                                src.seek(src_offset)
+                                dest.write(src.read(src_bytes))
+
+                shutil.rmtree(ext_dir, True)
+                arc = pkg.p5p.Archive(bad_arc_path, mode="r")
+                self.assertRaisesStringify(pkg.p5p.InvalidArchive,
+                    arc.extract_package_files, [dest_fhash], ext_dir,
+                    pub="test2")
+                arc.close()
+
+                os.unlink(arc_path)
+                os.unlink(bad_arc_path)
+
+                #
+                # Check that finding a directory where an archive is
+                # expected is handled.
+                #
+                os.mkdir(arc_path)
+                self.assertRaisesStringify(pkg.p5p.InvalidArchive,
+                    pkg.p5p.Archive, arc_path, mode="r")
+                os.rmdir(arc_path)
+
+                # Temporarily change the current archive version and create
+                # a new archive, and then verify that the expected exception
+                # is raised when an attempt is made to read it.
+                orig_ver = pkg.p5p.Archive.CURRENT_VERSION
+                try:
+                        pkg.p5p.Archive.CURRENT_VERSION = 99 # EVIL
+                        arc = pkg.p5p.Archive(arc_path, mode="w")
+                        arc.close()
+                finally:
+                        # Ensure this is reset to the right value.
+                        pkg.p5p.Archive.CURRENT_VERSION = orig_ver
+
+                self.assertRaisesStringify(pkg.p5p.InvalidArchive,
+                    pkg.p5p.Archive, arc_path, mode="r")
+                os.unlink(arc_path)
+
+
+if __name__ == "__main__":
+        unittest.main()
--- a/src/tests/cli/t_pkg_publisher.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/tests/cli/t_pkg_publisher.py	Wed Feb 09 18:43:21 2011 -0800
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 import testutils
@@ -424,8 +424,9 @@
                     self.bogus_url), exit=1)
                 self.pkg("set-publisher %s http://%s5 test11" % (add_opt,
                     self.bogus_url), exit=1)
-                self.pkg("set-publisher %s %s7 test1" % (add_opt,
-                    self.bogus_url), exit=1)
+                if etype == "origin":
+                        self.pkg("set-publisher %s %s7 test1" % (add_opt,
+                            self.bogus_url), exit=1)
 
                 # Test single remove.
                 self.pkg("set-publisher %s http://%s1 test1" % (remove_opt,
@@ -684,7 +685,7 @@
                 self.pkg("set-publisher -P test2", exit=1)
                 self.pkg("publisher test2")
                 self.pkg("set-publisher -e test2")
-                self.pkg("publisher | grep test2")
+                self.pkg("publisher -n | grep test2")
                 self.pkg("list -a bar")
 
                 self.pkg("set-publisher --disable test2")
@@ -693,7 +694,7 @@
                 self.pkg("list -a bar", exit=1)
                 self.pkg("publisher -a | grep test2")
                 self.pkg("set-publisher --enable test2")
-                self.pkg("publisher | grep test2")
+                self.pkg("publisher -n | grep test2")
                 self.pkg("list -a bar")
 
                 # should fail because test is the preferred publisher
@@ -707,7 +708,7 @@
                 self.pkg("publisher -H | head -1 | egrep test1")
                 self.pkg("publisher -H | head -2 | egrep test2")
                 self.pkg("publisher -H | head -3 | egrep test3")
-                # make test2 disabled, make sure order is preserved                
+                # make test2 disabled, make sure order is preserved
                 self.pkg("set-publisher --disable test2")
                 self.pkg("publisher") # ease debugging
                 self.pkg("publisher -H | head -1 | egrep test1")
@@ -722,13 +723,13 @@
                 self.pkg("publisher -H | head -3 | egrep test2")
                 # move test3 after test1
                 self.pkg("set-publisher --search-after=test1 test3")
-                self.pkg("publisher") # ease debugging              
+                self.pkg("publisher") # ease debugging
                 self.pkg("publisher -H | head -1 | egrep test1")
                 self.pkg("publisher -H | head -2 | egrep test3")
                 self.pkg("publisher -H | head -3 | egrep test2")
                 # move test2 before test3
                 self.pkg("set-publisher --search-before=test3 test2")
-                self.pkg("publisher") # ease debugging              
+                self.pkg("publisher") # ease debugging
                 self.pkg("publisher -H | head -1 | egrep test1")
                 self.pkg("publisher -H | head -2 | egrep test2")
                 self.pkg("publisher -H | head -3 | egrep test3")
@@ -745,7 +746,7 @@
                 self.rurl1 = self.dcs[1].get_repo_url()
                 self.rurl2 = self.dcs[2].get_repo_url()
                 self.image_create(self.rurl1, prefix="test1")
-                
+
         def test_new_publisher_ca_certs_with_refresh(self):
                 """Check that approving and revoking CA certs is reflected in
                 the output of pkg publisher and that setting the CA certs when
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/tests/cli/t_pkg_temp_sources.py	Wed Feb 09 18:43:21 2011 -0800
@@ -0,0 +1,841 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+# Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
+
+import testutils
+if __name__ == "__main__":
+        testutils.setup_environment("../../../proto")
+import pkg5unittest
+
+import os
+import pkg.fmri as fmri
+import pkg.portable as portable
+import pkg.misc as misc
+import pkg.p5p
+import shutil
+import stat
+import tempfile
+import unittest
+
+
+class TestPkgTempSources(pkg5unittest.ManyDepotTestCase):
+
+        # Don't discard the repository or run setUp() for every test.
+        persistent_setup = True
+
+        foo_pkg = """
+            open pkg://test/[email protected]
+            add set name=pkg.summary value="Example package foo."
+            add dir mode=0755 owner=root group=bin path=lib
+            add dir mode=0755 owner=root group=bin path=usr
+            add dir mode=0755 owner=root group=bin path=usr/bin
+            add dir mode=0755 owner=root group=bin path=usr/local
+            add dir mode=0755 owner=root group=bin path=usr/local/bin
+            add dir mode=0755 owner=root group=bin path=usr/share
+            add dir mode=0755 owner=root group=bin path=usr/share/doc
+            add dir mode=0755 owner=root group=bin path=usr/share/doc/foo
+            add dir mode=0755 owner=root group=bin path=usr/share/man
+            add dir mode=0755 owner=root group=bin path=usr/share/man/man1
+            add file tmp/foo mode=0755 owner=root group=bin path=usr/bin/foo
+            add file tmp/libfoo.so.1 mode=0755 owner=root group=bin path=lib/libfoo.so.1 variant.debug.foo=false
+            add file tmp/libfoo_debug.so.1 mode=0755 owner=root group=bin path=lib/libfoo.so.1 variant.debug.foo=true
+            add file tmp/foo.1 mode=0444 owner=root group=bin path=usr/share/man/man1/foo.1 facet.doc.man=true
+            add file tmp/README mode=0444 owner=root group=bin path=/usr/share/doc/foo/README
+            add link path=usr/local/bin/soft-foo target=usr/bin/foo
+            add hardlink path=usr/local/bin/hard-foo target=/usr/bin/foo
+            close """
+
+        incorp_pkg = """
+            open pkg://test/[email protected]
+            add set name=pkg.summary value="Incorporation"
+            add depend type=incorporate [email protected],5.11-0.1
+            close
+            open pkg://test/[email protected]
+            add set name=pkg.summary value="Incorporation"
+            add depend type=incorporate [email protected],5.11-0.2
+            close """
+
+        signed_pkg = """
+            open pkg://test/[email protected]
+            add depend type=require [email protected]
+            add dir mode=0755 owner=root group=bin path=usr/bin
+            add file tmp/quux mode=0755 owner=root group=bin path=usr/bin/quark
+            add set name=authorized.species value=bobcat
+            close """
+
+        quux_pkg = """
+            open pkg://test2/[email protected],5.11-0.1
+            add set name=pkg.summary value="Example package quux."
+            add depend type=require fmri=pkg:/incorp
+            close
+            open pkg://test2/[email protected],5.11-0.2
+            add set name=pkg.summary value="Example package quux."
+            add depend type=require fmri=pkg:/incorp
+            add dir mode=0755 owner=root group=bin path=usr
+            add dir mode=0755 owner=root group=bin path=usr/bin
+            add file tmp/quux mode=0755 owner=root group=bin path=usr/bin/quux
+            close """
+
+        misc_files = ["tmp/foo", "tmp/libfoo.so.1", "tmp/libfoo_debug.so.1",
+            "tmp/foo.1", "tmp/README", "tmp/LICENSE", "tmp/quux"]
+
+        def __seed_ta_dir(self, certs, dest_dir=None):
+                if isinstance(certs, basestring):
+                        certs = [certs]
+                if not dest_dir:
+                        dest_dir = self.ta_dir
+                self.assert_(dest_dir)
+                self.assert_(self.raw_trust_anchor_dir)
+                for c in certs:
+                        name = "%s_cert.pem" % c
+                        portable.copyfile(
+                            os.path.join(self.raw_trust_anchor_dir, name),
+                            os.path.join(dest_dir, name))
+
+        def image_create(self, *args, **kwargs):
+                pkg5unittest.ManyDepotTestCase.image_create(self,
+                    *args, **kwargs)
+                self.ta_dir = os.path.join(self.img_path, "etc/certs/CA")
+                os.makedirs(self.ta_dir)
+
+        def __publish_packages(self, rurl):
+                """Private helper function to publish packages needed for
+                testing.
+                """
+
+                pkgs = "".join([self.foo_pkg, self.incorp_pkg, self.signed_pkg,
+                    self.quux_pkg])
+
+                # Publish packages needed for tests.
+                plist = self.pkgsend_bulk(rurl, pkgs)
+
+                # Sign the 'signed' package.
+                r = self.get_repo(self.dcs[1].get_repodir())
+                r.add_signing_certs([os.path.join(self.pub_cas_dir,
+                    "pubCA1_ta1_cert.pem")], ca=True)
+                r.add_signing_certs([os.path.join(self.pub_cas_dir,
+                    "pubCA1_ta3_cert.pem")], ca=True)
+                r.add_signing_certs([os.path.join(self.inter_certs_dir,
+                    "i1_ta1_cert.pem")], ca=False)
+                r.add_signing_certs([os.path.join(self.inter_certs_dir,
+                    "i2_ta1_cert.pem")], ca=False)
+
+                sign_args = \
+                    "-k %(key)s -c %(cert)s -i %(i1)s -i %(i2)s %(pkg)s" % {
+                    "key": os.path.join(self.keys_dir,
+                        "cs1_pubCA1_key.pem"),
+                    "cert": os.path.join(self.cs_dir,
+                        "cs1_pubCA1_cert.pem"),
+                    "i1": os.path.join(self.chain_certs_dir,
+                        "ch1_pubCA1_cert.pem"),
+                    "i2": os.path.join(self.chain_certs_dir,
+                        "ch2_pubCA1_cert.pem"),
+                    "pkg": plist[3]
+                }
+                self.pkgsign(rurl, sign_args)
+
+                # This is just a sanity check to verify that the
+                # package was signed as expected.
+                self.image_create(rurl, prefix=None)
+                self.__seed_ta_dir("ta1")
+                self.pkg("set-property signature-policy verify")
+                self.pkg("install signed")
+                self.image_destroy()
+
+                return [
+                    fmri.PkgFmri(sfmri)
+                    for sfmri in plist
+                ]
+
+        def __archive_packages(self, arc_name, repo, plist):
+                """Private helper function to archive packages needed for
+                testing.
+                """
+
+                arc_path = os.path.join(self.test_root, arc_name)
+                assert not os.path.exists(arc_path)
+
+                arc = pkg.p5p.Archive(arc_path, mode="w")
+                for pfmri in plist:
+                        arc.add_repo_package(pfmri, repo)
+                arc.close()
+
+                return arc_path
+
+        def setUp(self):
+                pkg5unittest.ManyDepotTestCase.setUp(self, ["test", "test",
+                    "empty"])
+                self.make_misc_files(self.misc_files)
+
+                # First repository will contain all packages.
+                self.all_rurl = self.dcs[1].get_repo_url()
+
+                # Second repository will contain only foo.
+                self.foo_rurl = self.dcs[2].get_repo_url()
+
+                # Third will be empty.
+                self.empty_rurl = self.dcs[3].get_repo_url()
+
+                # Setup base test paths.
+                self.path_to_certs = os.path.join(self.ro_data_root,
+                    "signing_certs", "produced")
+                self.keys_dir = os.path.join(self.path_to_certs, "keys")
+                self.cs_dir = os.path.join(self.path_to_certs,
+                    "code_signing_certs")
+                self.chain_certs_dir = os.path.join(self.path_to_certs,
+                    "chain_certs")
+                self.pub_cas_dir = os.path.join(self.path_to_certs,
+                    "publisher_cas")
+                self.inter_certs_dir = os.path.join(self.path_to_certs,
+                    "inter_certs")
+                self.raw_trust_anchor_dir = os.path.join(self.path_to_certs,
+                    "trust_anchors")
+                self.crl_dir = os.path.join(self.path_to_certs, "crl")
+
+                # Publish packages.
+                plist = self.__publish_packages(self.all_rurl)
+
+                # Copy foo to second repository.
+                self.pkgrecv(self.all_rurl, "-d %s foo" % self.foo_rurl)
+
+                # Now create a package archive containing all packages, and
+                # then one archive for each package name.
+                repo = self.dcs[1].get_repo()
+                self.all_arc = self.__archive_packages("all_pkgs.p5p", repo,
+                    plist)
+
+                for alist in ([plist[0]], [plist[1], plist[2]], [plist[3]],
+                    [plist[4], plist[5]]):
+                        arc_path = self.__archive_packages(
+                            "%s.p5p" % alist[0].pkg_name, repo, alist)
+                        setattr(self, "%s_arc" % alist[0].pkg_name, arc_path)
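+                # (The loop above creates self.foo_arc, self.incorp_arc,
+                # self.signed_arc, and self.quux_arc.)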
+
+                self.ta_dir = None
+
+                # Copy an archive and set its permissions to 0000 for testing
+                # unprivileged user access attempts.
+                self.perm_arc = os.path.join(self.test_root, "noaccess.p5p")
+                portable.copyfile(self.foo_arc, self.perm_arc)
+                os.chmod(self.perm_arc, 0)
+
+                # Create an empty archive.
+                arc_path = os.path.join(self.test_root, "empty.p5p")
+                arc = pkg.p5p.Archive(arc_path, mode="w")
+                arc.close()
+                self.empty_arc = arc_path
+
+                # Store FMRIs for later use.
+                self.foo10 = plist[0]
+                self.incorp10 = plist[1]
+                self.incorp20 = plist[2]
+                self.signed10 = plist[3]
+                self.quux01 = plist[4]
+                self.quux10 = plist[5]
+
+        def test_00_list(self):
+                """Verify that the list operation works as expected for
+                temporary origins.
+                """
+
+                # Create an image and verify no packages are known.
+                self.image_create(self.empty_rurl, prefix=None)
+                self.pkg("list -a", exit=1)
+
+                # Verify graceful failure for an empty source alone or in
+                # combination with another temporary source.
+                self.pkg("list -H -g %s" % self.empty_arc, exit=1)
+                self.pkg("list -H -g %s -g %s" % (self.empty_arc,
+                    self.foo_arc), exit=1)
+
+                # Verify graceful failure if source doesn't exist.
+                self.pkg("list -H -g %s" % (self.foo_arc + ".nosuchpkg"),
+                    exit=1)
+
+                # Verify graceful failure if user doesn't have permission to
+                # access temporary source.
+                self.pkg("list -H -g %s" % self.perm_arc, su_wrap=True, exit=1)
+
+                # Verify graceful list failure if -u is used with -g.
+                self.pkg("list -H -u -g %s" % self.foo_arc, exit=2)
+
+                # Verify list output for a single package temporary source.
+                # -a is used here to verify that even though -a is implicit,
+                # it is not an error to specify it.
+                self.pkg("list -aH -g %s" % self.foo_arc)
+                expected = "foo (test) 1.0 known -----\n"
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                # Note that -a is implicit when -g is used, so all of
+                # the following tests purposefully omit it.
+
+                # Verify list output for a multiple package temporary source
+                # as an unprivileged user.
+                self.pkg("list -fH -g %s" % self.all_arc, su_wrap=True)
+                expected = \
+                    ("foo (test) 1.0 known -----\n"
+                    "incorp (test) 2.0 known -----\n"
+                    "incorp (test) 1.0 known u----\n"
+                    "quux (test2) 1.0-0.2 known -----\n"
+                    "quux (test2) 0.1-0.1 known u----\n"
+                    "signed (test) 1.0 known -----\n")
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                # Verify list output for a multiple package temporary source.
+                self.pkg("list -fH -g %s" % self.all_arc)
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                # Verify list output for multiple temporary sources using
+                # different combinations of archives and repositories.
+                self.pkg("list -fH -g %s -g %s" % (self.signed_arc,
+                    self.foo_rurl))
+                expected = \
+                    ("foo (test) 1.0 known -----\n"
+                    "signed (test) 1.0 known -----\n")
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                self.pkg("list -fH -g %s -g %s -g %s -g %s" % (self.signed_arc,
+                    self.incorp_arc, self.quux_arc, self.foo_arc))
+                expected = \
+                    ("foo (test) 1.0 known -----\n"
+                    "incorp (test) 2.0 known -----\n"
+                    "incorp (test) 1.0 known u----\n"
+                    "quux (test2) 1.0-0.2 known -----\n"
+                    "quux (test2) 0.1-0.1 known u----\n"
+                    "signed (test) 1.0 known -----\n")
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                self.pkg("list -fH -g %s -g %s -g %s -g %s" % (self.signed_arc,
+                    self.quux_arc, self.incorp_arc, self.foo_rurl))
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                self.pkg("list -fH -g %s -g %s" % (self.all_arc,
+                    self.all_rurl))
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                # Verify list -g without -f.
+                self.pkg("list -H -g %s -g %s -g %s -g %s" % (self.signed_arc,
+                    self.quux_arc, self.incorp_arc, self.foo_rurl))
+                expected = \
+                    ("foo (test) 1.0 known -----\n"
+                    "incorp (test) 2.0 known -----\n"
+                    "quux (test2) 1.0-0.2 known -----\n"
+                    "signed (test) 1.0 known -----\n")
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                # Verify package installed from archive shows in default list
+                # output.
+                self.pkg("install -g %s [email protected]" % self.incorp_arc)
+                self.pkg("list -H")
+                expected = "incorp (test) 1.0 installed -----\n"
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                # Verify list -g with an incorp installed without -f.
+                self.pkg("list -H -g %s -g %s -g %s -g %s" % (self.signed_arc,
+                    self.quux_arc, self.incorp_arc, self.foo_rurl))
+                expected = \
+                    ("foo (test) 1.0 known -----\n"
+                    "incorp (test) 1.0 installed u----\n"
+                    "quux (test2) 0.1-0.1 known u----\n"
+                    "signed (test) 1.0 known -----\n")
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                # Verify output again as unprivileged user.
+                self.pkg("list -H -g %s -g %s -g %s -g %s" % (self.signed_arc,
+                    self.quux_arc, self.incorp_arc, self.foo_rurl),
+                    su_wrap=True)
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                # Verify list -g with an incorp installed with -f.
+                self.pkg("list -fH -g %s -g %s -g %s -g %s" % (self.signed_arc,
+                    self.quux_arc, self.incorp_arc, self.foo_rurl))
+                expected = \
+                    ("foo (test) 1.0 known -----\n"
+                    "incorp (test) 2.0 known -----\n"
+                    "incorp (test) 1.0 installed u----\n"
+                    "quux (test2) 1.0-0.2 known -----\n"
+                    "quux (test2) 0.1-0.1 known u----\n"
+                    "signed (test) 1.0 known -----\n")
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                # Verify list -g with an incorp installed and -n.
+                self.pkg("list -nH -g %s -g %s -g %s -g %s" % (self.signed_arc,
+                    self.quux_arc, self.incorp_arc, self.foo_rurl))
+                expected = \
+                    ("foo (test) 1.0 known -----\n"
+                    "incorp (test) 2.0 known -----\n"
+                    "quux (test2) 1.0-0.2 known -----\n"
+                    "signed (test) 1.0 known -----\n")
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                # Uninstall all packages and verify there are no known packages.
+                self.pkg("uninstall \*")
+                self.pkg("list -af", exit=1)
+
+                # Cleanup.
+                self.image_destroy()
+
+        def test_01_info(self):
+                """Verify that the info operation works as expected for
+                temporary origins.
+                """
+
+                # Create an image and verify no packages are known.
+                self.image_create(self.empty_rurl, prefix=None)
+                self.pkg("list -a", exit=1)
+
+                # Verify graceful failure for an empty source alone or in
+                # combination with another temporary source.
+                self.pkg("info -g %s \*" % self.empty_arc, exit=1)
+                self.pkg("info -g %s -g %s foo" % (self.empty_arc,
+                    self.foo_arc), exit=1)
+
+                # Verify graceful failure if source doesn't exist.
+                self.pkg("info -g %s foo" % (self.foo_arc + ".nosuchpkg"),
+                    exit=1)
+
+                # Verify graceful failure if user doesn't have permission to
+                # access temporary source.
+                self.pkg("info -g %s foo" % self.perm_arc, su_wrap=True, exit=1)
+
+                # Verify graceful failure if -l is used with -g.
+                self.pkg("info -l -g %s foo" % self.foo_arc, exit=2)
+
+                # Verify output for a single package temporary source.
+                # -r is used here to verify that even though -r is implicit,
+                # it is not an error to specify it.
+                def pd(pfmri):
+                        return pfmri.version.get_timestamp().strftime("%c")
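+                # (pd() renders a package's timestamp the same way the
+                # "Packaging Date" field of pkg info output does.)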
+
+                self.pkg("info -r -g %s foo" % self.foo_arc, su_wrap=True)
+                expected = """\
+          Name: foo
+       Summary: Example package foo.
+         State: Not installed
+     Publisher: test
+       Version: 1.0
+ Build Release: 5.11
+        Branch: None
+Packaging Date: %(pkg_date)s
+          Size: 41.00 B
+          FMRI: %(pkg_fmri)s
+""" % { "pkg_date": pd(self.foo10), "pkg_fmri": self.foo10 }
+                self.assertEqualDiff(expected, self.output)
+
+                # Again, as a privileged user.
+                self.pkg("info -r -g %s foo" % self.foo_arc)
+                self.assertEqualDiff(expected, self.output)
+
+                # Note that -r is implicit when -g is used, so all of
+                # the following tests purposefully omit it.
+
+                # Verify info output for a multiple package temporary source
+                # as an unprivileged user.
+                self.pkg("info -g %s \*" % self.all_arc, su_wrap=True)
+                expected = """\
+          Name: foo
+       Summary: Example package foo.
+         State: Not installed
+     Publisher: test
+       Version: 1.0
+ Build Release: 5.11
+        Branch: None
+Packaging Date: %(foo10_pkg_date)s
+          Size: 41.00 B
+          FMRI: %(foo10_pkg_fmri)s
+
+          Name: incorp
+       Summary: Incorporation
+         State: Not installed
+     Publisher: test
+       Version: 2.0
+ Build Release: 5.11
+        Branch: None
+Packaging Date: %(incorp20_pkg_date)s
+          Size: 0.00 B
+          FMRI: %(incorp20_pkg_fmri)s
+
+          Name: quux
+       Summary: Example package quux.
+         State: Not installed
+     Publisher: test2
+       Version: 1.0
+ Build Release: 5.11
+        Branch: 0.2
+Packaging Date: %(quux10_pkg_date)s
+          Size: 8.00 B
+          FMRI: %(quux10_pkg_fmri)s
+
+          Name: signed
+       Summary: 
+         State: Not installed
+     Publisher: test
+       Version: 1.0
+ Build Release: 5.11
+        Branch: None
+Packaging Date: %(signed10_pkg_date)s
+          Size: 2.76 kB
+          FMRI: %(signed10_pkg_fmri)s
+""" % { "foo10_pkg_date": pd(self.foo10), "foo10_pkg_fmri": self.foo10,
+    "incorp20_pkg_date": pd(self.incorp20), "incorp20_pkg_fmri": self.incorp20,
+    "quux10_pkg_date": pd(self.quux10), "quux10_pkg_fmri": self.quux10,
+    "signed10_pkg_date": pd(self.signed10), "signed10_pkg_fmri": self.signed10
+    }
+                self.assertEqualDiff(expected, self.output)
+
+                # Verify info output for a multiple package temporary source.
+                self.pkg("info -g %s [email protected] [email protected] [email protected] "
+                    "[email protected]" % self.all_arc)
+                self.assertEqualDiff(expected, self.output)
+
+                # Verify info result for multiple temporary sources using
+                # different combinations of archives and repositories.
+                self.pkg("info -g %s -g %s [email protected] [email protected] [email protected]" % (
+                    self.signed_arc, self.foo_rurl))
+
+                self.pkg("info -g %s -g %s -g %s -g %s [email protected] [email protected] "
+                    "[email protected] [email protected]" % (
+                    self.signed_arc, self.incorp_arc, self.quux_arc,
+                    self.foo_arc))
+
+                self.pkg("info -g %s -g %s -g %s -g %s [email protected] [email protected] "
+                    "[email protected] [email protected]" % (
+                    self.signed_arc, self.incorp_arc, self.quux_arc,
+                    self.foo_rurl))
+
+                self.pkg("info -g %s -g %s [email protected] [email protected] [email protected] "
+                    "[email protected]" % (self.all_arc, self.all_rurl))
+
+                # Verify package installed from archive shows in default info
+                # output.
+                self.pkg("install -g %s [email protected]" % self.foo_arc)
+                self.pkg("info")
+                expected = """\
+          Name: foo
+       Summary: Example package foo.
+         State: Installed
+     Publisher: test
+       Version: 1.0
+ Build Release: 5.11
+        Branch: None
+Packaging Date: %(pkg_date)s
+          Size: 41.00 B
+          FMRI: %(pkg_fmri)s
+""" % { "pkg_date": pd(self.foo10), "pkg_fmri": self.foo10 }
+                self.assertEqualDiff(expected, self.output)
+
+                # Verify that when package info is shown from an archive, the
+                # package is reported as installed if it matches the installed
+                # one.
+                self.pkg("info -g %s foo" % self.foo_arc)
+                self.assertEqualDiff(expected, self.output)
+
+                # Uninstall all packages and verify there are no known packages.
+                self.pkg("uninstall \*")
+                self.pkg("info -r \*", exit=1)
+
+                # Cleanup.
+                self.image_destroy()
+
+        def test_02_contents(self):
+                """Verify that the contents operation works as expected for
+                temporary origins.
+                """
+
+                # Create an image and verify no packages are known.
+                self.image_create(self.empty_rurl, prefix=None)
+                self.pkg("list -a", exit=1)
+
+                # Verify graceful failure for an empty source alone or in
+                # combination with another temporary source.
+                self.pkg("contents -g %s \*" % self.empty_arc, exit=1)
+                self.pkg("contents -g %s -g %s foo" % (self.empty_arc,
+                    self.foo_arc), exit=1)
+
+                # Verify graceful failure if source doesn't exist.
+                self.pkg("contents -g %s foo" % (self.foo_arc + ".nosuchpkg"),
+                    exit=1)
+
+                # Verify graceful failure if user doesn't have permission to
+                # access temporary source.
+                self.pkg("contents -g %s foo" % self.perm_arc, su_wrap=True,
+                    exit=1)
+
+                # Verify output for a single package temporary source.
+                # -r is used here to verify that even though -r is implicit,
+                # it is not an error to specify it.
+                def pd(pfmri):
+                        return pfmri.version.get_timestamp().strftime("%c")
+
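+                # -m emits the raw manifest for each matching package; the
+                # action lines are compared sorted since manifest ordering
+                # is not significant.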
+                self.pkg("contents -mr -g %s foo" % self.foo_arc, su_wrap=True)
+                expected = """\
+set name=pkg.fmri value=%s
+set name=pkg.summary value="Example package foo."
+dir group=bin mode=0755 owner=root path=lib
+dir group=bin mode=0755 owner=root path=usr
+dir group=bin mode=0755 owner=root path=usr/bin
+dir group=bin mode=0755 owner=root path=usr/local
+dir group=bin mode=0755 owner=root path=usr/local/bin
+dir group=bin mode=0755 owner=root path=usr/share
+dir group=bin mode=0755 owner=root path=usr/share/doc
+dir group=bin mode=0755 owner=root path=usr/share/doc/foo
+dir group=bin mode=0755 owner=root path=usr/share/man
+dir group=bin mode=0755 owner=root path=usr/share/man/man1
+file 0acf1107d31f3bab406f8611b21b8fade78ac874 chash=20db00fbd7c9fb551e54c5b424bf24d48cf81b7a facet.doc.man=true group=bin mode=0444 owner=root path=usr/share/man/man1/foo.1 pkg.csize=29 pkg.size=9
+file b265f2ec87c4a55eb2b6b4c926e7c65f7247a27e chash=5ae38559680146c49d647163ac2f60cdf43e20d8 group=bin mode=0755 owner=root path=usr/bin/foo pkg.csize=27 pkg.size=7
+file 4ea0699d20b99238a877051e50406687fd4fe163 chash=7a23120f5a4f1eae2829a707020d0cdbab10e9a2 group=bin mode=0755 owner=root path=lib/libfoo.so.1 pkg.csize=41 pkg.size=21 variant.debug.foo=true
+file a285ada5f3cae14ea00e97a8d99bd3e357cb0dca chash=97a09a2356d068d8dbe418de90012908c095d3e2 group=bin mode=0755 owner=root path=lib/libfoo.so.1 pkg.csize=35 pkg.size=15 variant.debug.foo=false
+file dc84bd4b606fe43fc892eb245d9602b67f8cba38 chash=e1106f9505253dfe46aa48c353740f9e1896a844 group=bin mode=0444 owner=root path=usr/share/doc/foo/README pkg.csize=30 pkg.size=10
+hardlink path=usr/local/bin/hard-foo target=/usr/bin/foo
+link path=usr/local/bin/soft-foo target=usr/bin/foo
+""" % self.foo10
+
+                # Again, as prvileged user.
+                self.pkg("contents -mr -g %s foo" % self.foo_arc)
+                self.assertEqualDiff(sorted(expected.splitlines()),
+                    sorted(self.output.splitlines()))
+
+                # Note that -r is implicit when -g is used, so all of
+                # the following tests purposefully omit it.
+
+                # Verify contents result for multiple temporary sources using
+                # different combinations of archives and repositories.
+                self.pkg("contents -g %s -g %s [email protected] [email protected] "
+                    "[email protected]" % (self.signed_arc, self.foo_rurl))
+
+                self.pkg("contents -g %s -g %s -g %s -g %s [email protected] [email protected] "
+                    "[email protected] [email protected]" % (self.signed_arc, self.incorp_arc,
+                    self.quux_arc, self.foo_arc))
+
+                self.pkg("contents -g %s -g %s -g %s -g %s [email protected] [email protected] "
+                    "[email protected] [email protected]" % (self.signed_arc, self.incorp_arc,
+                    self.quux_arc, self.foo_rurl))
+
+                self.pkg("contents -g %s -g %s [email protected] [email protected] [email protected] "
+                    "[email protected]" % (self.all_arc, self.all_rurl))
+
+                # Verify package installed from archive can be used with
+                # contents.
+                self.pkg("install -g %s [email protected]" % self.foo_arc)
+                self.pkg("contents foo")
+
+                # Uninstall all packages and verify there are no known packages.
+                self.pkg("uninstall \*")
+                self.pkg("contents -r \*", exit=1)
+
+                # Cleanup.
+                self.image_destroy()
+
+        def test_03_install_update(self):
+                """Verify that install and update work as expected for temporary
+                origins.
+                """
+
+                #
+                # Create an image and verify no packages are known.
+                #
+                self.image_create(self.empty_rurl, prefix=None)
+                self.pkg("list -a", exit=1)
+
+                # Verify graceful failure if source doesn't exist.
+                self.pkg("install -g %s foo" % (self.foo_arc + ".nosuchpkg"),
+                    exit=1)
+
+                # Verify graceful failure if user doesn't have permission to
+                # access temporary source.
+                self.pkg("install -g %s foo" % self.perm_arc, su_wrap=True,
+                    exit=1)
+
+                # Verify attempting to install a package with a missing
+                # dependency fails gracefully.
+                self.pkg("install -g %s signed" % self.signed_arc, exit=1)
+
+                # Verify a package from a publisher not already configured can
+                # be installed using temporary origins.  Installing a package
+                # in this scenario will result in the publisher being added
+                # but without any origin information.
+                self.pkg("install -g %s foo" % self.foo_arc)
+                self.pkg("list foo")
+
+                # Verify that publisher exists now (without origin information)
+                # and is enabled and sticky (-n omits disabled publishers).
+                self.pkg("publisher -nH")
+                expected = """\
+empty (preferred) origin online %s/
+test 
+""" % self.empty_rurl
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                # Verify that signed package can now be installed since
+                # dependency was satisfied.
+                self.pkg("install -g %s signed" % self.signed_arc)
+                self.pkg("list foo signed")
+
+                # Verify that removing all packages leaves no packages known
+                # even though publisher remains configured.
+                self.pkg("uninstall \*")
+                self.pkg("list -af", exit=1)
+
+                # Verify publisher can be removed.
+                self.pkg("unset-publisher test")
+
+                #
+                # Create an image using the foo archive.
+                #
+                self.image_create(misc.parse_uri(self.foo_arc), prefix=None)
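+                # Seed the image with the ta1 trust anchor so that the
+                # signed package's signature can be verified.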
+                self.__seed_ta_dir("ta1")
+
+                # Verify that the signed package can be installed and that the
+                # archive configured as the publisher's origin allows its
+                # dependencies to be satisfied.
+                self.pkg("set-property signature-policy verify")
+                self.pkg("install -g %s signed" % self.signed_arc)
+                self.pkg("list foo signed")
+
+                # Verify that removing all packages leaves only foo known.
+                self.pkg("uninstall \*")
+                self.pkg("list -aH")
+                expected = "foo 1.0 known -----\n"
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+                #
+                # Create an image and verify no packages are known.
+                #
+                self.image_create(self.empty_rurl, prefix=None)
+                self.pkg("list -a", exit=1)
+
+                # Install an older version of a known package.
+                self.pkg("install -g %s [email protected]" % self.all_arc)
+                self.pkg("list [email protected] [email protected]")
+
+                # Verify graceful failure if source doesn't exist.
+                self.pkg("update -g %s foo" % (self.foo_arc + ".nosuchpkg"),
+                    exit=1)
+
+                # Verify graceful failure if user doesn't have permission to
+                # access temporary source.
+                self.pkg("update -g %s foo" % self.perm_arc, su_wrap=True,
+                    exit=1)
+
+                # Verify that packages can be updated using temporary origins.
+                self.pkg("update -g %s -g %s" % (self.incorp_arc,
+                    self.quux_arc))
+                self.pkg("list [email protected] [email protected]")
+
+                # Verify that both test and test2 are configured without
+                # origins.
+                self.pkg("publisher -H")
+                expected = """\
+empty (preferred) origin online %s/
+test 
+test2 
+""" % self.empty_rurl
+                output = self.reduceSpaces(self.output)
+                self.assertEqualDiff(expected, output)
+
+        def test_04_change_varcets(self):
+                """Verify that change-facet and change-variant work as expected
+                for temporary origins.
+                """
+
+                #
+                # Create an image and verify no packages are known.
+                #
+                self.image_create(self.empty_rurl, prefix=None)
+                self.pkg("list -a", exit=1)
+                self.pkg("set-property flush-content-cache-on-sucess True")
+
+                # Install a package from an archive.
+                self.pkg("install -g %s foo" % self.foo_arc)
+
+                #
+                # Verify change-facet can use temporary origins.
+                #
+                fpath = os.path.join(self.img_path, "usr/share/man/man1/foo.1")
+                assert os.path.exists(fpath)
+
+                # Now set facet.doc.man to false and verify faceted item is
+                # gone.
+                self.pkg("change-facet facet.doc.man=false")
+                assert not os.path.exists(fpath)
+
+                # Now attempt to set the facet to true again; this should
+                # fail since the publisher has no origins and the flushed
+                # content cache cannot supply the needed file.
+                self.pkg("change-facet facet.doc.man=true", exit=1)
+
+                # Verify graceful failure if source doesn't exist.
+                self.pkg("change-facet -g %s facet.doc.man=true" %
+                    (self.foo_arc + ".nosuchpkg"), exit=1)
+
+                # Verify graceful failure if user doesn't have permission to
+                # access temporary source.
+                self.pkg("change-facet -g %s facet.doc.man=true" %
+                    self.perm_arc, su_wrap=True, exit=1)
+
+                # Verify that if the original archive is provided, the operation
+                # will succeed.
+                self.pkg("change-facet -g %s facet.doc.man=True" % self.foo_arc)
+                assert os.path.exists(fpath)
+
+                #
+                # Verify change-variant can use temporary origins.
+                #
+                vpath = os.path.join(self.img_path, "lib/libfoo.so.1")
+                assert os.path.exists(vpath)
+                self.assertEqual(os.stat(vpath).st_size, 15)
+
+                # Now attempt to change the debug variant; this should fail
+                # since no origin is available to supply the new content.
+                self.pkg("change-variant -vv variant.debug.foo=true", exit=1)
+
+                # Verify graceful failure if source doesn't exist.
+                self.pkg("change-variant -vvg %s variant.debug.foo=true" %
+                    (self.foo_arc + ".nosuchpkg"), exit=1)
+
+                # Verify graceful failure if user doesn't have permission to
+                # access temporary source.
+                self.pkg("change-variant -vvg %s variant.debug.foo=true" %
+                    self.perm_arc, su_wrap=True, exit=1)
+
+                # Verify that if the original archive is provided, the operation
+                # will succeed.
+                self.pkg("change-variant -vvg %s variant.debug.foo=true" %
+                    self.foo_arc)
+                assert os.path.exists(vpath)
+                self.assertEqual(os.stat(vpath).st_size, 21)
+
+
+if __name__ == "__main__":
+        unittest.main()
--- a/src/tests/cli/t_pkgrecv.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/tests/cli/t_pkgrecv.py	Wed Feb 09 18:43:21 2011 -0800
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2011, Oracle and/or its affiliates. All rights reserved.
 #
 
 import testutils
@@ -35,6 +35,7 @@
 import pkg.fmri as fmri
 import pkg.manifest as manifest
 import pkg.misc as misc
+import pkg.p5p as p5p
 import pkg.server.repository as repo
 import shutil
 import tempfile
@@ -518,6 +519,110 @@
                 self.pkgrecv(self.durl3, "-d %s nosuchpackage" % self.durl4,
                     exit=1)
 
+        def test_8_archive(self):
+                """Verify that pkgrecv handles package archives as expected."""
+
+                # Set up a repository with packages from multiple publishers.
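+                # Republishing amber under test2 makes the same package
+                # available from two different publishers.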
+                amber = self.amber10.replace("open ", "open pkg://test2/")
+                t2_amber10 = self.pkgsend_bulk(self.durl3, amber)[0]
+                self.pkgrecv(self.durl1, "-d %s [email protected] [email protected]" %
+                    self.durl3)
+
+                # Now attempt to receive from a repository to a package archive.
+                arc_path = os.path.join(self.test_root, "test.p5p")
+                self.pkgrecv(self.durl3, "-a -d %s \*" % arc_path)
+
+                #
+                # Verify that the archive can be opened and the expected
+                # packages are inside.
+                #
+                amber10 = self.published[0]
+                bronze10 = self.published[2]
+                arc = p5p.Archive(arc_path, mode="r")
+
+                # Check for expected publishers.
+                expected = set(["test1", "test2"])
+                pubs = set(p.prefix for p in arc.get_publishers())
+                self.assertEqualDiff(expected, pubs)
+
+                # Check for expected package FMRIs.
+                expected = set([amber10, t2_amber10, bronze10])
+                tmpdir = tempfile.mkdtemp(dir=self.test_root)
+                returned = []
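+                # Extract each publisher's catalog parts from the archive
+                # and read them back to enumerate the packages it contains.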
+                for pfx in pubs:
+                        catdir = os.path.join(tmpdir, pfx)
+                        os.mkdir(catdir)
+                        for part in ("catalog.attrs", "catalog.base.C"):
+                                arc.extract_catalog1(part, catdir, pfx)
+
+                        cat = catalog.Catalog(meta_root=catdir, read_only=True)
+                        returned.extend(str(f) for f in cat.fmris())
+                self.assertEqualDiff(expected, set(returned))
+                arc.close()
+                shutil.rmtree(tmpdir)
+
+                #
+                # Verify that packages can be received from an archive to an
+                # archive.
+                #
+                arc2_path = os.path.join(self.test_root, "test2.p5p")
+                self.pkgrecv(arc_path, "-a -d %s pkg://test2/amber" % arc2_path)
+
+                # Check for expected publishers.
+                arc = p5p.Archive(arc2_path, mode="r")
+                expected = set(["test2"])
+                pubs = set(p.prefix for p in arc.get_publishers())
+                self.assertEqualDiff(expected, pubs)
+
+                # Check for expected package FMRIs.
+                expected = set([t2_amber10])
+                tmpdir = tempfile.mkdtemp(dir=self.test_root)
+                returned = []
+                for pfx in pubs:
+                        catdir = os.path.join(tmpdir, pfx)
+                        os.mkdir(catdir)
+                        for part in ("catalog.attrs", "catalog.base.C"):
+                                arc.extract_catalog1(part, catdir, pfx)
+
+                        cat = catalog.Catalog(meta_root=catdir, read_only=True)
+                        returned.extend(str(f) for f in cat.fmris())
+                self.assertEqualDiff(expected, set(returned))
+                arc.close()
+
+                #
+                # Verify that pkgrecv gracefully fails if archive already
+                # exists.
+                #
+                self.pkgrecv(arc_path, "-d %s \*" % arc2_path, exit=1)
+
+                #
+                # Verify that packages can be received from an archive to
+                # a repository.
+                #
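+                # --newest should succeed, listing the newest versions
+                # available from the archive.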
+                self.pkgrecv(arc_path, "--newest")
+                self.pkgrecv(arc_path, "-d %s pkg://test2/amber bronze" %
+                    self.durl4)
+                repo = self.dcs[4].get_repo()
+
+                # Check for expected publishers.
+                expected = set(["test1", "test2"])
+                pubs = repo.publishers
+                self.assertEqualDiff(expected, pubs)
+
+                # Check for expected package FMRIs.
+                expected = sorted([t2_amber10, bronze10])
+                returned = []
+                for pfx in repo.publishers:
+                        cat = repo.get_catalog(pub=pfx)
+                        returned.extend(str(f) for f in cat.fmris())
+                self.assertEqualDiff(expected, sorted(returned))
+
 
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/pkg5unittest.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/tests/pkg5unittest.py	Wed Feb 09 18:43:21 2011 -0800
@@ -98,7 +98,7 @@
 
 # Version test suite is known to work with.
 PKG_CLIENT_NAME = "pkg"
-CLIENT_API_VERSION = 52
+CLIENT_API_VERSION = 53
 
 ELIDABLE_ERRORS = [ TestSkippedException, depotcontroller.DepotStateException ]
 
@@ -208,6 +208,18 @@
                 return "%s.py %s.%s" % (self.__class__.__module__,
                     self.__class__.__name__, self._testMethodName)
 
+        def assertRaisesStringify(self, excClass, callableObj, *args, **kwargs):
+                """Perform the same logic as assertRaises, but then verify that
+                the exception raised can be stringified."""
+
+                try:
+                        callableObj(*args, **kwargs)
+                except excClass, e:
+                        str(e)
+                        return
+                else:
+                        raise self.failureException, "%s not raised" % excClass
+
         #
         # Uses property() to implements test_root as a read-only attribute.
         #
--- a/src/util/distro-import/importer.py	Wed Feb 09 15:25:24 2011 -0800
+++ b/src/util/distro-import/importer.py	Wed Feb 09 18:43:21 2011 -0800
@@ -56,7 +56,7 @@
 from pkg.misc import emsg
 from pkg.portable import PD_LOCAL_PATH, PD_PROTO_DIR, PD_PROTO_DIR_LIST
 
-CLIENT_API_VERSION = 52
+CLIENT_API_VERSION = 53
 PKG_CLIENT_NAME = "importer.py"
 pkg.client.global_settings.client_name = PKG_CLIENT_NAME