16744 repository multi-publisher on-disk format should be formalized and implemented
authorShawn Walker <shawn.walker@oracle.com>
Thu, 19 Aug 2010 23:33:49 -0700
changeset 2028 b2c674e6ee28
parent 2027 f2e9f684e5be
child 2029 8650d31180a5
16744 repository multi-publisher on-disk format should be formalized and implemented 166 repository class doesn't set directory mode when creating directories 684 read-only depots should look for changes to files 2086 repository class should validate that a repository is really a repository 2671 Indexing should recover from partial success indexing 7425 depot started not readonly but without fork-allowed (or on non-posix system) won't ever serve search 7357 depots don't discover when search becomes available via external means 8725 versioning information for depot and repository metadata needed 12517 interrupting indexing can cause search failure 14636 publisher alias isn't validated 15318 pkg.search_errors EmptyMainDictLine references unbound variable 15670 test suite needs to setup client info/error logger handlers 15807 depot should fail with proper error message if repo structure is not right 16558 repository class should load in-flight transactions on demand 16583 pkgrepo provides too much management 16646 pkgsend close --no-catalog option ignored 16720 pkgsend crashes with PartialIndexingException
doc/client_api_versions.txt
src/client.py
src/depot.py
src/gui/modules/misc_non_gui.py
src/man/pkg.depotd.1m.txt
src/man/pkgrepo.1.txt
src/modules/client/api.py
src/modules/client/api_errors.py
src/modules/client/history.py
src/modules/client/image.py
src/modules/client/imageconfig.py
src/modules/client/publisher.py
src/modules/client/transport/repo.py
src/modules/client/transport/transport.py
src/modules/depotcontroller.py
src/modules/indexer.py
src/modules/misc.py
src/modules/nrlock.py
src/modules/p5i.py
src/modules/publish/transaction.py
src/modules/search_errors.py
src/modules/server/api.py
src/modules/server/catalog.py
src/modules/server/depot.py
src/modules/server/face.py
src/modules/server/feed.py
src/modules/server/repository.py
src/modules/server/transaction.py
src/pkg/Makefile
src/pkgdep.py
src/pkgrepo.py
src/publish.py
src/pull.py
src/sign.py
src/tests/api/t_api.py
src/tests/api/t_api_list.py
src/tests/api/t_api_search.py
src/tests/api/t_file_manager.py
src/tests/api/t_pkg_api_install.py
src/tests/cli/t_pkg_depotd.py
src/tests/cli/t_pkg_history.py
src/tests/cli/t_pkg_image_update.py
src/tests/cli/t_pkg_install.py
src/tests/cli/t_pkg_list.py
src/tests/cli/t_pkg_publisher.py
src/tests/cli/t_pkg_refresh.py
src/tests/cli/t_pkg_search.py
src/tests/cli/t_pkgdep.py
src/tests/cli/t_pkgdep_resolve.py
src/tests/cli/t_pkgrecv.py
src/tests/cli/t_pkgrepo.py
src/tests/cli/t_pkgsend.py
src/tests/cli/t_pkgsign.py
src/tests/cli/t_publish_api.py
src/tests/cli/t_util_merge.py
src/tests/pkg5unittest.py
src/util/distro-import/importer.py
src/web/en/base.shtml
src/web/en/catalog.shtml
src/web/en/index.shtml
src/web/en/search.shtml
src/web/en/stats.shtml
src/web/index.shtml
--- a/doc/client_api_versions.txt	Wed Aug 18 14:52:59 2010 -0700
+++ b/doc/client_api_versions.txt	Thu Aug 19 23:33:49 2010 -0700
@@ -1,11 +1,26 @@
+Version 42:
+Incompatible with clients using versions 0-41.
+
+    pkg.client.api_errors has changed as follows:
+
+        * The MainDictParsingException class was removed
+
+        * The BadPublisherAlias and IndexLockedException
+          exceptions were added.
+
+    pkg.client.publisher has changed as follows:
+
+        * The inter_certs property was renamed to intermediate_certs.
+
 Version 41:
 Compatible with clients using versions 40-41.
 
-    Allows the client to handle signed packages.  This includes the addition
-    of a large number of optional parameters to the client/publisher class.
-    It also makes changes to image creation to allow image properties to be set
-    during the process.  New subclasses of ApiException were created to indicate
-    different error conditions associated with signing failures.
+    Allows the client to handle signed packages.  This includes the
+    addition of a large number of optional parameters to the
+    client/publisher class.  It also makes changes to image creation to
+    allow image properties to be set during the process.  New subclasses
+    of ApiException were created to indicate different error conditions
+    associated with signing failures.
 
 Version 40:
 Incompatible with clients using versions 0-39.
--- a/src/client.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/client.py	Thu Aug 19 23:33:49 2010 -0700
@@ -80,7 +80,7 @@
     RESULT_FAILED_TRANSPORT, RESULT_FAILED_UNKNOWN, RESULT_FAILED_OUTOFMEMORY)
 from pkg.misc import EmptyI, msg, PipeError
 
-CLIENT_API_VERSION = 40
+CLIENT_API_VERSION = 42
 PKG_CLIENT_NAME = "pkg"
 
 JUST_UNKNOWN = 0
@@ -872,9 +872,6 @@
                 # be printed on the same line as the spinner.
                 error("\n" + str(e))
                 return EXIT_OOPS
-        except api_errors.MainDictParsingException, e:
-                error(str(e))
-                return EXIT_OOPS
         except KeyboardInterrupt:
                 raise
         except api_errors.BEException, e:
@@ -3679,9 +3676,6 @@
                 error(str(e) + PROBLEMATIC_PERMISSIONS_ERROR_MESSAGE,
                     cmd="rebuild-index")
                 return EXIT_OOPS
-        except api_errors.MainDictParsingException, e:
-                error(str(e), cmd="rebuild-index")
-                return EXIT_OOPS
         else:
                 return EXIT_OK
 
--- a/src/depot.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/depot.py	Thu Aug 19 23:33:49 2010 -0700
@@ -124,7 +124,7 @@
                 sys.exit(retcode)
 
         print """\
-Usage: /usr/lib/pkg.depotd [-d repo_dir] [-p port] [-s threads]
+Usage: /usr/lib/pkg.depotd [-d inst_root] [-p port] [-s threads]
            [-t socket_timeout] [--cfg] [--content-root]
            [--disable-ops op[/1][,...]] [--debug feature_list]
            [--file-root dir] [--log-access dest] [--log-errors dest]
@@ -132,18 +132,18 @@
            [--socket-path] [--ssl-cert-file] [--ssl-dialog] [--ssl-key-file]
            [--sort-file-max-size size] [--writable-root dir]
 
-        -d              The file system path at which the server should find its
+        -d inst_root    The file system path at which the server should find its
                         repository data.  Required unless PKG_REPO has been set
                         in the environment.
-        -p              The port number on which the instance should listen for
+        -p port         The port number on which the instance should listen for
                         incoming package requests.  The default value is 80 if
                         ssl certificate and key information has not been
                         provided; otherwise, the default value is 443.
-        -s              The maximum number of seconds the server should wait for
+        -s threads      The number of threads that will be started to serve
+                        requests.  The default value is 10.
+        -t timeout      The maximum number of seconds the server should wait for
                         a response from a client before closing a connection.
                         The default value is 60.
-        -t              The number of threads that will be started to serve
-                        requests.  The default value is 10.
         --cfg           The pathname of the file to use when reading and writing
                         depot configuration data, or a fully qualified service
                         fault management resource identifier (FMRI) of the SMF
@@ -162,7 +162,7 @@
                         Possible values are: headers.
         --file-root     The path to the root of the file content for a given
                         repository.  This is used to override the default,
-                        <repo_root>/file.
+                        <inst_root>/file or <inst_root>/publisher/<prefix>/file.
         --log-access    The destination for any access related information
                         logged by the depot process.  Possible values are:
                         stderr, stdout, none, or an absolute pathname.  The
@@ -210,7 +210,7 @@
         --help or -?
 
 Environment:
-        PKG_REPO                Used as default repo_dir if -d not provided.
+        PKG_REPO                Used as default inst_root if -d not provided.
         PKG_DEPOT_CONTENT       Used as default content_root if --content-root
                                 not provided."""
         sys.exit(retcode)
@@ -221,7 +221,6 @@
         def __init__(self, *args):
                 Exception.__init__(self, *args)
 
-
 if __name__ == "__main__":
 
         setlocale(locale.LC_ALL, "")
@@ -386,8 +385,8 @@
                                 rebuild = True
                         elif opt == "--refresh-index":
                                 # Note: This argument is for internal use
-                                # only. It's used when pkg.depotd is reexecing
-                                # itself and needs to know that's the case.
+                                # only.
+                                #
                                 # This flag is purposefully omitted in usage.
                                 # The supported way to forcefully reindex is to
                                 # kill any pkg.depot using that directory,
@@ -395,6 +394,7 @@
                                 # pkg.depot process. The index will be rebuilt
                                 # automatically on startup.
                                 reindex = True
+                                exit_ready = True
                         elif opt == "--set-property":
                                 try:
                                         prop, p_value = arg.split("=", 1)
@@ -560,7 +560,7 @@
 
         # If the program is going to reindex, the port is irrelevant since
         # the program will not bind to a port.
-        if not reindex and not exit_ready:
+        if not exit_ready:
                 try:
                         cherrypy.process.servers.check_port(HOST_DEFAULT, port)
                 except Exception, e:
@@ -607,7 +607,7 @@
         # Setup SSL if requested.
         key_data = None
         ssl_dialog = dconf.get_property("pkg", "ssl_dialog")
-        if not reindex and ssl_cert_file and ssl_key_file and \
+        if not exit_ready and ssl_cert_file and ssl_key_file and \
             ssl_dialog != "builtin":
                 cmdline = None
                 def get_ssl_passphrase(*ignored):
@@ -737,17 +737,25 @@
         # remaining preparation.
 
         # Initialize repository state.
-        fork_allowed = not reindex and not exit_ready  
+        if not readonly:
+                # Not readonly, so assume a new repository should be created.
+                try:
+                        sr.repository_create(inst_root, properties=repo_props)
+                except sr.RepositoryExistsError:
+                        # Already exists, nothing to do.
+                        pass
+                except (api_errors.ApiException, sr.RepositoryError), _e:
+                        emsg("pkg.depotd: %s" % _e)
+                        sys.exit(1)
+
         try:
                 sort_file_max_size = dconf.get_property("pkg",
                     "sort_file_max_size")
 
-                repo = sr.Repository(auto_create=not readonly,
-                    cfgpathname=repo_config_file, file_root=file_root,
-                    fork_allowed=fork_allowed, log_obj=cherrypy,
-                    mirror=mirror, properties=repo_props, read_only=readonly,
-                    refresh_index=not add_content, repo_root=inst_root,
-                    sort_file_max_size=sort_file_max_size,
+                repo = sr.Repository(cfgpathname=repo_config_file,
+                    file_root=file_root, log_obj=cherrypy, mirror=mirror,
+                    properties=repo_props, read_only=readonly,
+                    root=inst_root, sort_file_max_size=sort_file_max_size,
                     writable_root=writable_root)
         except (RuntimeError, sr.RepositoryError), _e:
                 emsg("pkg.depotd: %s" % _e)
@@ -759,14 +767,23 @@
                 emsg("pkg.depotd: %s" % str(_e))
                 sys.exit(1)
 
+        if not rebuild and not add_content and not repo.mirror and \
+            not (repo.read_only and not repo.writable_root):
+                # Automatically update search indexes on startup if not already
+                # told to, and not in readonly/mirror mode.
+                reindex = True
+
         if reindex:
-                # Initializing the repository above updated search indices
-                # as needed; nothing left to do, so exit.
-                sys.exit(0)
-
-        if rebuild:
                 try:
-                        repo.rebuild()
+                        if repo.root:
+                                repo.refresh_index()
+                except (sr.RepositoryError, search_errors.IndexingException,
+                    api_errors.ApiException), e:
+                        emsg(str(e), "INDEX")
+                        sys.exit(1)
+        elif rebuild:
+                try:
+                        repo.rebuild(build_index=True)
                 except sr.RepositoryError, e:
                         emsg(str(e), "REBUILD")
                         sys.exit(1)
@@ -775,10 +792,10 @@
                     api_errors.PermissionsException), e:
                         emsg(str(e), "INDEX")
                         sys.exit(1)
-
         elif add_content:
                 try:
                         repo.add_content()
+                        repo.refresh_index()
                 except sr.RepositoryError, e:
                         emsg(str(e), "ADD_CONTENT")
                         sys.exit(1)
@@ -788,7 +805,7 @@
                         emsg(str(e), "INDEX")
                         sys.exit(1)
 
-        # ready to start depot; exit now if requested
+        # Ready to start depot; exit now if requested.
         if exit_ready:
                 sys.exit(0)
 
--- a/src/gui/modules/misc_non_gui.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/gui/modules/misc_non_gui.py	Thu Aug 19 23:33:49 2010 -0700
@@ -41,7 +41,7 @@
 
 # The current version of the Client API the PM, UM and
 # WebInstall GUIs have been tested against and are known to work with.
-CLIENT_API_VERSION = 40
+CLIENT_API_VERSION = 42
 LOG_DIR = "/var/tmp"
 LOG_ERROR_EXT = "_error.log"
 LOG_INFO_EXT = "_info.log"
--- a/src/man/pkg.depotd.1m.txt	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/man/pkg.depotd.1m.txt	Thu Aug 19 23:33:49 2010 -0700
@@ -5,7 +5,7 @@
      pkg.depotd - image packaging system depot server
 
 SYNOPSIS
-     /usr/lib/pkg.depotd [-d repo_dir] [-p port] [-s threads]
+     /usr/lib/pkg.depotd [-d inst_root] [-p port] [-s threads]
          [-t socket_timeout] [--add-content] [--cfg] [--content-root]
          [--debug] [--disable-ops=<op[/1]>[,...]] [--log-access]
          [--log-errors] [--mirror] [--proxy-base url] [--readonly]
@@ -248,7 +248,7 @@
      '--cfg', the following options can be used to alter the default
      behavior of the depot server:
 
-     -d repo_dir                See pkg/repo_dir above.
+     -d inst_root               See pkg/inst_root above.
 
      -p port                    See pkg/port above.
 
--- a/src/man/pkgrepo.1.txt	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/man/pkgrepo.1.txt	Thu Aug 19 23:33:49 2010 -0700
@@ -5,117 +5,227 @@
      pkgrepo - image packaging system repository management utility
 
 SYNOPSIS
-     /usr/bin/pkgrepo [options] command [cmd_options] [operands]
+     pkgrepo create [--version] uri_or_path
 
-     /usr/bin/pkgrepo create uri_or_path
+     pkgrepo add-signing-ca-cert [-p publisher ...]
+         [-s repo_uri_or_path] path ...
 
-     /usr/bin/pkgrepo property [-F format] [-H]
-         [<section/property> ...]
-     /usr/bin/pkgrepo set-property <section/property>=<value> or 
-         <section/property>=(["<value>", ...])
+     pkgrepo add-signing-intermediate-cert [-p publisher ...]
+         [-s repo_uri_or_path] path ...
 
-     /usr/bin/pkgrepo publisher [-F format] [-H] [publisher ...]
+     pkgrepo get [-p publisher ...] [-s repo_uri_or_path]
+         [section/property ...]
+
+     pkgrepo info [-F format] [-H] [-p publisher ...]
+         [-s repo_uri_or_path]
 
-     /usr/bin/pkgrepo rebuild [--no-index]
-     /usr/bin/pkgrepo refresh [--no-catalog] [--no-index]
+     pkgrepo rebuild [-s repo_uri_or_path] [--no-catalog]
+         [--no-index]
+
+     pkgrepo refresh [-s repo_uri_or_path] [--no-catalog]
+         [--no-index]
+
+     pkgrepo remove-signing-ca-cert [-p publisher ...]
+         [-s repo_uri_or_path] hash ...
 
-     /usr/bin/pkgrepo add-signing-ca-cert path ...
-     /usr/bin/pkgrepo add-signing-intermediate-cert path ...
-     /usr/bin/pkgrepo remove-signing-ca-cert hash ...
-     /usr/bin/pkgrepo remove-signing-intermediate-cert hash ...
+     pkgrepo remove-signing-intermediate-cert [-p publisher ...]
+         [-s repo_uri_or_path] hash ...
 
-     /usr/bin/pkgrepo version
-     /usr/bin/pkgrepo help
+     pkgrepo set [-p publisher] [-s repo_uri_or_path]
+         section/property=[value] ... or
+         section/property=([value]) ...
+
+     pkgrepo help
+     pkgrepo version
 
 DESCRIPTION
-     pkgrepo provides the ability to create, manage, and manage pkg(5)
-     package repositories.  Package repositories are a pre-defined set
-     of directories and files that permit the storage and retrieval of
+     pkgrepo provides the ability to create and manage pkg(5) package
+     repositories.  Package repositories are a pre-defined set of
+     directories and files that permit the storage and retrieval of
      package data by pkg(1) and publication clients such as pkgsend(1)
      or pkgrecv(1).  In addition, when network-based access to a
      package repository is needed, pkg.depotd(1m) can provide clients
-     access to store and/or retrieve package data.
+     access to the repository to store and/or retrieve package data.
 
 OPTIONS
      The following options are supported:
 
-     -s repo_uri_or_path
-          A URI or filesystem path representing the location of a
-          package repository. Currently, only filesystem-based
-          repositories are supported.
-
      --help or -?
           Displays a usage message.
 
 SUBCOMMANDS
      The following subcommands are supported:
 
-     add-signing-ca-certs path ...
-          Add the certificates provided as approved CA certificates for the 
-          publisher.
+     create [--version] <uri_or_path>
+          May only be used with filesystem-based repositories.
+
+          Creates a pkg(5) repository at the specified location.
+
+          With --version, create a repository in a format compatible
+          with the specified version.  By default, version 4
+          repositories are created.  Supported versions are:
+
+               3    Supports storage of packages for a single
+                    publisher, catalog version 1, and search
+                    version 1.
+
+               4    Supports storage of packages for multiple
+                    publishers, catalog version 1, and search
+                    version 1.
 
-     add-signing-intermediate-certs path ...
-          Add the certificates provided as intermediate certificates for the 
-          publisher.
+     add-signing-ca-certs [-p publisher ...]
+       [-s repo_uri_or_path] path ...
+          May only be used with filesystem-based repositories.
+
+          Add the certificates provided as approved CA certificates.
+
+          With -p, only add certificate data for the given publisher.
+          The special value 'all' may be used to add the certificate
+          data for all publishers.  If not specified, the default
+          publisher will be used.
+
+          With -s, operate on the repository located at the given URI
+          or filesystem path.
+
+     add-signing-intermediate-certs [-p publisher ...]
+       [-s repo_uri_or_path] path ...
+          May only be used with filesystem-based repositories.
+
+          Add the certificates provided as intermediate certificates.
 
-     create <uri_or_path>
-          Creates a pkg(5) repository at the specified location.  This
-          command can only be used to create filesystem-based
-          repositories.
+          With -p, only add certificate data for the given publisher.
+          The special value 'all' may be used to add the certificate
+          data for all publishers.  If not specified, the default
+          publisher will be used.
+
+          With -s, operate on the repository located at the given URI
+          or filesystem path.
+
+     get [-F format] [-H] [-s repo_uri_or_path] [section/property ...]
+          Displays the property information for the repository or
+          its publishers.
+
+          By default, each property and its value are printed on
+          separate lines.  Empty ASCII string values are represented
+          by a  pair of double  quotes ("").  Bourne  shell  meta-
+          characters (';', '&', '(', ')', '|', '^', '<', '>', newline,
+          space, tab, backslash, '"', single-quote, '`') in ASCII
+          string values are quoted by backslashes (\).
 
-     property [-F format] [-H] [<section/property> ...]
-          Display repository property information.  With no arguments,
-          display the section, name, and values of all properties.  If
-          a specific list of property names is requested, only the
-          matching ones are displayed.
+          With -F, specify an alternative output format.  Currently,
+          only 'tsv' (Tab Separated Values) is valid.
+
+          With -H, omit the headers from the listing.
+
+          With -p, display the property information for the given
+          publisher.  The special value 'all' can be used to display
+          the properties for all publishers.  This option may be
+          specified multiple times.
+
+          With -s, operate on the repository located at the given URI
+          or filesystem path.
+
+     info [-F format] [-H] [-p publisher ...] [-s repo_uri_or_path]
+          Displays a listing of the package publishers known by the
+          repository.  The listing includes the number of packages
+          for each publisher, when the publisher's package data was
+          last updated, and the status of the publisher's package
+          data (such as whether it is currently being processed).
 
           With -F, specify an alternative output format.  Currently,
           only 'tsv' (Tab Separated Values) is valid.
 
           With -H, omit the headers from the listing.
 
-     set-property <section/property>=<value> or
-         <section/property>=(["<value>", ...])
-
-          Update the value of an existing repository property or add
-          a new one.
+          With -p, only display the data for the given publisher.
+          If not provided, the data for all publishers will be
+          displayed.  This option may be specified multiple times.
 
-     publisher [-F format] [-H] [<publisher> ...]
-          Lists the publishers of packages in the specified repository
-          with a summary of the number of packages for each publisher
-          and when the publisher's package data was last updated.
+          With -s, operate on the repository located at the given URI
+          or filesystem path.
 
-          With -F, specify an alternative output format.  Currently,
-          only 'tsv' (Tab Separated Values) is valid.
-
-          With -H, omit the headers from the listing.
-
-     rebuild [--no-index]
+     rebuild [-s repo_uri_or_path] [--no-catalog] [--no-index]
           Discards all catalog, search, and other cached information
           found in the repository and then re-creates it based on the
-          current contents of the repository.  This command can only
-          be used with filesystem-based repositories.
+          current contents of the repository.
+
+          With -s, operate on the repository located at the given URI
+          or filesystem path.
 
-          With --no-index, don't update search indexes.
+          With --no-catalog, don't rebuild package data.
 
-     refresh [--no-catalog] [--no-index]
+          With --no-index, don't rebuild search indexes.
+
+     refresh [-s repo_uri_or_path] [--no-catalog] [--no-index]
           Catalogs any new packages found in the repository and
           updates all search indexes.  This is intended for use with
           deferred publication (--no-catalog or --no-index options of
-          pkgsend).  This command can only be used with filesystem-
-          based repositories.
+          pkgsend).
+
+          With -s, operate on the repository located at the given URI
+          or filesystem path.
 
           With --no-catalog, don't add any new packages.
 
           With --no-index, don't update search indexes.
 
-     remove-signing-ca-certs hash ...
-          Remove the certificates provided from the list of CA certificates for
-          the publisher.
+     remove-signing-ca-certs [-p publisher ...]
+       [-s repo_uri_or_path] hash ...
+          May only be used with filesystem-based repositories.
+
+          Remove the certificates provided from the list of CA
+          certificates.
+
+          With -p, only remove certificate data for the given
+          publisher.  The special value 'all' may be used to remove
+          the certificate data for all publishers.  If not specified,
+          the default publisher will be used.
+
+          With -s, operate on the repository located at the given URI
+          or filesystem path.
+
+     remove-signing-intermediate-certs [-p publisher ...]
+       [-s repo_uri_or_path] hash ...
+          May only be used with filesystem-based repositories.
+
+          Remove the certificates provided from the list of
+          intermediate certificates.
+
+          With -p, only remove certificate data for the given
+          publisher.  The special value 'all' may be used to remove
+          the certificate data for all publishers.  If not specified,
+          the default publisher will be used.
 
-     remove-signing-intermediate-certs hash ...
-          Remove the certificates provided from the list of intermediate 
-          certificates for the publisher.
+          With -s, operate on the repository located at the given URI
+          or filesystem path.
+
+     set [-p publisher] [-s repo_uri_or_path]
+       [section/property=value ...]
+          May only be used with filesystem-based repositories.
+
+          Sets the value of the specified properties for the
+          repository or publishers.
+
+          With -p, only set property data for the given publisher.
+          The special value 'all' may be used to set the property
+          for all publishers.
+
+          With -s, operate on the repository located at the given URI
+          or filesystem path.
+
+          Properties and values may be specified using one of the
+          following forms:
+
+               section/property=
+                    The property's value will be cleared.
+
+               section/property=value
+                    The property's value will be replaced with the given
+                    value.
+
+               section/property=(value1 value2 valueN)
+                    The property's value will be replaced with the
+                    list of values.
 
      version
           Display a unique string identifying the version of the
@@ -131,33 +241,73 @@
      Example 2:  Display a summary of publishers and the number of
          packages in a repository:
 
-     $ pkgrepo -s /my/repository publisher
-     PUBLISHER   PACKAGES VERSIONS UPDATED
-     example.com 5        8        May 22, 2010 12:06:03 PM
+     $ pkgrepo info -s /my/repository
+     PUBLISHER   PACKAGES STATUS UPDATED
+     example.com 5        online May 22, 2010 12:06:03 PM
 
-     $ pkgrepo -s http://example.com/repository
-     PUBLISHER   PACKAGES VERSIONS UPDATED
-     example.com 5        8        May 22, 2010 12:06:03 PM
+     $ pkgrepo info -s http://example.com/repository
+     PUBLISHER   PACKAGES STATUS UPDATED
+     example.com 5        online May 22, 2010 12:06:03 PM
 
      Example 3:  Rebuild the repository's catalogs and search data.
 
-     $ pkgrepo -s /my/repository rebuild
+     $ pkgrepo rebuild -s /my/repository
 
      Example 4:  Refresh the repository's catalogs and search data.
 
-     $ pkgrepo -s /my/repository refresh
+     $ pkgrepo refresh -s /my/repository
+
+     $ pkgrepo refresh -s http://example.com/repository
+
+     Example 5:  Display all repository properties.
+
+     $ pkgrepo get -s /my/repository
+     PUBLISHER SECTION    PROPERTY VALUE
+               publisher  prefix   example.com
+
+     Example 6:  Display all publisher properties.
 
-     $ pkgrepo -s http://example.com/repository refresh
+     $ pkgrepo get -s /my/repository -p all
+     PUBLISHER   SECTION    PROPERTY VALUE
+     example.com repository origins  http://example.com/repository
+     example.net repository origins  http://example.net/repository
+
+     Example 7:  Set a repository property.
 
-     Example 5:  Add the certificate stored in /tmp/example_file.pem as a
-     signing CA certificate for the repository located at /my/repository.
+     $ pkgrepo set -s /my/repository publisher/prefix=example.com
+
+     Example 8:  Set a publisher property.
+
+     $ pkgrepo set -s /my/repository -p example.com \
+         repository/origins=http://example.com/repository
 
-     $ pkgrepo -s /my/repository add-signing-ca-cert /tmp/example_file.pem
+     Example 9:  Add the certificate stored in /tmp/example_file.pem
+     as a signing CA certificate for the repository located at
+     /my/repository for the default publisher.
+
+     $ pkgrepo add-signing-ca-cert -s /my/repository \
+         /tmp/example_file.pem
+
+     Example 10:  Add the certificate stored in /tmp/example_file.pem
+     as a signing CA certificate for the repository located at
+     /my/repository for a specific publisher.
+
+     $ pkgrepo add-signing-ca-cert -s /my/repository \
+         -p example.com /tmp/example_file.pem
 
-     Example 6:  Remove the certificate with hash a12345 from the list of
-     intermediate certificates for the repository located at /my/repository.
+     Example 11:  Remove the certificate with hash a12345 from the
+     list of intermediate certificates for the repository located at
+     /my/repository for the default publisher.
+
+     $ pkgrepo remove-signing-intermediate-cert -s /my/repository \
+         a12345
 
-     $ pkgrepo -s /my/repository remove-signing-intermediate-ca-cert a12345
+     Example 12:  Remove the certificate with hash a12345 from the
+     list of intermediate certificates for the repository located at
+     /my/repository for a specific publisher.
+
+     $ pkgrepo remove-signing-intermediate-cert -s /my/repository \
+         -p example.com a12345
 
 EXIT STATUS
      The following exit values are returned:
--- a/src/modules/client/api.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/client/api.py	Thu Aug 19 23:33:49 2010 -0700
@@ -60,7 +60,7 @@
 from pkg.client.imageplan import EXECUTED_OK
 from pkg.client import global_settings
 
-CURRENT_API_VERSION = 41
+CURRENT_API_VERSION = 42
 CURRENT_P5I_VERSION = 1
 
 # Image type constants.
@@ -733,19 +733,18 @@
                                 # Must be done after bootenv restore.
                                 self.log_operation_end(error=e)
                                 raise
+                        except search_errors.IndexLockedException, e:
+                                error = apx.IndexLockedException(e)
+                                self.log_operation_end(error=error)
+                                raise error
                         except search_errors.ProblematicPermissionsIndexException, e:
                                 error = apx.ProblematicPermissionsIndexException(e)
                                 self.log_operation_end(error=error)
                                 raise error
-                        except (search_errors.InconsistentIndexException,
-                            search_errors.PartialIndexingException), e:
+                        except search_errors.InconsistentIndexException, e:
                                 error = apx.CorruptedIndexException(e)
                                 self.log_operation_end(error=error)
                                 raise error
-                        except search_errors.MainDictParsingException, e:
-                                error = apx.MainDictParsingException(e)
-                                self.log_operation_end(error=error)
-                                raise error
                         except actuator.NonzeroExitException, e:
                                 # Won't happen during image-update
                                 be.restore_install_uninstall()
--- a/src/modules/client/api_errors.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/client/api_errors.py	Thu Aug 19 23:33:49 2010 -0700
@@ -671,16 +671,6 @@
         pass
 
 
-class MainDictParsingException(SearchException):
-        """This is used when the main dictionary could not parse a line."""
-        def __init__(self, e):
-                SearchException.__init__(self)
-                self.e = e
-
-        def __str__(self):
-                return str(self.e)
-
-
 class MalformedSearchRequest(SearchException):
         """Raised when the server cannot understand the format of the
         search request."""
@@ -794,6 +784,18 @@
                 return str(self.exception)
 
 
+class IndexLockedException(IndexingException):
+        """This is used when an attempt to modify an index locked by another
+        process or thread is made."""
+
+        def __init__(self, e):
+                IndexingException.__init__(self, e)
+                self.exception = e
+
+        def __str__(self):
+                return str(self.exception)
+
+
 class ProblematicPermissionsIndexException(IndexingException):
         """ This is used when the indexer is unable to create, move, or remove
         files or directories it should be able to. """
@@ -1071,6 +1073,13 @@
                     "op": self._args.get("operation", None) }
 
 
+class BadPublisherAlias(PublisherError):
+        """Used to indicate that a publisher alias is not valid."""
+
+        def __str__(self):
+                return _("'%s' is not a valid publisher alias.") % self.data
+
+
 class BadPublisherPrefix(PublisherError):
         """Used to indicate that a publisher name is not valid."""
 
@@ -1755,7 +1764,7 @@
 
 
 class ServerReturnError(ApiException):
-        """This exception is used when the server reutrns a line which the
+        """This exception is used when the server returns a line which the
         client cannot parse correctly."""
 
         def __init__(self, line):
--- a/src/modules/client/history.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/client/history.py	Thu Aug 19 23:33:49 2010 -0700
@@ -88,7 +88,6 @@
     api_errors.ImageUpdateOnLiveImageException: RESULT_FAILED_BAD_REQUEST,
     api_errors.ProblematicPermissionsIndexException: RESULT_FAILED_STORAGE,
     api_errors.PermissionsException: RESULT_FAILED_STORAGE,
-    api_errors.MainDictParsingException: RESULT_FAILED_STORAGE,
     api_errors.SearchException: RESULT_FAILED_SEARCH,
     api_errors.PlanCreationException: RESULT_FAILED_CONSTRAINED,
     api_errors.NonLeafPackageException: RESULT_FAILED_CONSTRAINED,
--- a/src/modules/client/image.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/client/image.py	Thu Aug 19 23:33:49 2010 -0700
@@ -54,6 +54,7 @@
 import pkg.fmri
 import pkg.manifest                     as manifest
 import pkg.misc                         as misc
+import pkg.nrlock
 import pkg.portable                     as portable
 import pkg.server.catalog
 import pkg.version
@@ -209,7 +210,6 @@
                 self.pkgdir = None
                 self.root = root
                 self.__lock = pkg.nrlock.NRLock()
-                self.__locked = False
                 self.__lockf = None
                 self.__sig_policy = None
                 self.__trust_anchors = None
@@ -315,7 +315,7 @@
                 """Returns a boolean value indicating whether the image is
                 currently locked."""
 
-                return self.__locked
+                return self.__lock and self.__lock.locked
 
         @contextmanager
         def locked_op(self, op, allow_unprivileged=False):
@@ -362,7 +362,6 @@
                 if not self.__lock.acquire(blocking=blocking):
                         raise api_errors.ImageLockedError()
 
-                self.__locked = True
                 try:
                         # Attempt to obtain a file lock.
                         self.__lock_process()
@@ -433,6 +432,7 @@
                         lf.flush()
                         self.__lockf = lf
                 except EnvironmentError, e:
+                        lf.close()
                         if e.errno == errno.EACCES:
                                 raise api_errors.PermissionsException(
                                     e.filename)
@@ -440,19 +440,23 @@
                                 raise api_errors.ReadOnlyFileSystemException(
                                     e.filename)
                         raise
+                except:
+                        lf.close()
+                        raise
 
         def unlock(self):
                 """Unlocks the image."""
 
-                if self.__lockf:
-                        # To avoid race conditions with the next caller waiting
-                        # for the lock file, it is simply truncated instead of
-                        # removed.
-                        self.__lockf.truncate(0)
-                        self.__lockf.close()
+                try:
+                        if self.__lockf:
+                                # To avoid race conditions with the next caller
+                                # waiting for the lock file, it is simply
+                                # truncated instead of removed.
+                                self.__lockf.truncate(0)
+                                self.__lockf.close()
+                finally:
                         self.__lockf = None
-                self.__locked = False
-                self.__lock.release()
+                        self.__lock.release()
 
         def image_type(self, d):
                 """Returns the type of image at directory: d; or None"""
@@ -585,7 +589,7 @@
                 # If current image is locked, then it should be unlocked
                 # and then relocked after the imgdir is changed.  This
                 # ensures that alternate BE scenarios work.
-                relock = self.imgdir and self.__locked
+                relock = self.imgdir and self.locked
                 if relock:
                         self.unlock()
 
@@ -659,7 +663,13 @@
 
                 # Ensure structure for publishers is valid.
                 for pub in self.gen_publishers():
-                        pub.create_meta_root()
+                        try:
+                                pub.create_meta_root()
+                        except api_errors.PermissionsException:
+                                # Assume that an unprivileged user is attempting
+                                # to use the image after a publisher's metadata
+                                # was removed.
+                                continue
 
                 # Once its structure is valid, then ensure state information
                 # is intact.
--- a/src/modules/client/imageconfig.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/client/imageconfig.py	Thu Aug 19 23:33:49 2010 -0700
@@ -398,7 +398,7 @@
                         c.set(section, "revoked_ca_certs", str(
                             pub.revoked_ca_certs))
                         c.set(section, "intermediate_certs", str(
-                            pub.inter_certs))
+                            pub.intermediate_certs))
                         c.set(section, "disabled", str(pub.disabled))
                         c.set(section, "sticky", str(pub.sticky))
 
@@ -542,10 +542,10 @@
                         revoked_ca_certs = []
 
                 try:
-                        inter_certs = self.read_list(cp.get(s,
+                        intermediate_certs = self.read_list(cp.get(s,
                             "intermediate_certs"))
                 except ConfigParser.NoOptionError:
-                        inter_certs = []
+                        intermediate_certs = []
                 try:
                         signature_policy = cp.get(s, "signature_policy")
                 except ConfigParser.NoOptionError:
@@ -741,8 +741,8 @@
                 pub = publisher.Publisher(prefix, alias=alias,
                     client_uuid=client_uuid, disabled=disabled,
                     meta_root=pmroot, repositories=[r], sticky=sticky,
-                    ca_certs=ca_certs, inter_certs=inter_certs, props=props,
-                    revoked_ca_certs=revoked_ca_certs,
+                    ca_certs=ca_certs, intermediate_certs=intermediate_certs,
+                    props=props, revoked_ca_certs=revoked_ca_certs,
                     approved_ca_certs=approved_ca_certs)
 
                 # write out the UUID if it was set
--- a/src/modules/client/publisher.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/client/publisher.py	Thu Aug 19 23:33:49 2010 -0700
@@ -879,8 +879,9 @@
 
         def __init__(self, prefix, alias=None, client_uuid=None, disabled=False,
             meta_root=None, repositories=None, selected_repository=None,
-            transport=None, sticky=True, ca_certs=EmptyI, inter_certs=EmptyI,
-            props=None, revoked_ca_certs=EmptyI, approved_ca_certs=EmptyI):
+            transport=None, sticky=True, ca_certs=EmptyI,
+            intermediate_certs=EmptyI, props=None, revoked_ca_certs=EmptyI,
+            approved_ca_certs=EmptyI):
                 """Initialize a new publisher object."""
 
                 if client_uuid is None:
@@ -915,8 +916,8 @@
 
                 # Writing out an EmptyI to a config file and reading it back
                 # in doesn't work correctly at the moment, but reading and
-                # writing an empty list does. So if inter_certs is empty, make
-                # sure it's stored as an empty list.
+                # writing an empty list does. So if intermediate_certs is empty,
+                # make sure it's stored as an empty list.
                 #
                 # The relevant implementation is probably the line which
                 # strips ][ from the input in imageconfig.read_list.
@@ -935,10 +936,10 @@
                 else:
                         self.approved_ca_certs = []
 
-                if inter_certs:
-                        self.inter_certs = inter_certs
+                if intermediate_certs:
+                        self.intermediate_certs = intermediate_certs
                 else:
-                        self.inter_certs = []
+                        self.intermediate_certs = []
 
                 if props:
                         self.properties.update(props)
@@ -978,7 +979,7 @@
                     props=self.properties,
                     revoked_ca_certs=self.revoked_ca_certs,
                     approved_ca_certs=self.approved_ca_certs,
-                    inter_certs=self.inter_certs)
+                    intermediate_certs=self.intermediate_certs)
                 pub._source_object_id = id(self)
                 return pub
 
@@ -1037,6 +1038,12 @@
                 return True
 
         def __set_alias(self, value):
+                # Aliases must comply with the same restrictions that prefixes
+                # have as they are intended to be usable in any case where
+                # a prefix may be used.
+                if value is not None and value != "" and \
+                    not misc.valid_pub_prefix(value):
+                        raise api_errors.BadPublisherAlias(value)
                 self.__alias = value
 
         def __set_disabled(self, disabled):
@@ -1773,8 +1780,11 @@
                 pubs = None
                 try:
                         pubs = self.transport.get_publisherdata(repo)
-                except api_errors.UnsupportedRepositoryOperation:
-                        # Nothing more can be done.
+                except (api_errors.TransportError,
+                    api_errors.UnsupportedRepositoryOperation):
+                        # Nothing more can be done (because the target origin
+                        # can't be contacted, or because it doesn't support
+                        # retrieval of publisher configuration data).
                         return
 
                 if not pubs:
@@ -1984,7 +1994,7 @@
                 were necessary to validate its CA certificates against the
                 image's trust anchors."""
 
-                for c in self.inter_certs:
+                for c in self.intermediate_certs:
                         self.get_cert_by_hash(c, verify_hash=True)
 
         def update_props(self, set_props=EmptyI, add_prop_values=EmptyI,
--- a/src/modules/client/transport/repo.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/client/transport/repo.py	Thu Aug 19 23:33:49 2010 -0700
@@ -29,6 +29,7 @@
 import httplib
 import itertools
 import os
+import simplejson as json
 import sys
 import urlparse
 import urllib
@@ -51,19 +52,19 @@
         the operations that are performed against a repo.  Subclasses
         should implement protocol specific repo modifications."""
 
-        def do_search(self, data, header=None, ccancel=None):
+        def do_search(self, data, header=None, ccancel=None, pub=None):
                 """Perform a search request."""
 
                 raise NotImplementedError
 
-        def get_catalog(self, ts=None, header=None, ccancel=None):
+        def get_catalog(self, ts=None, header=None, ccancel=None, pub=None):
                 """Get the catalog from the repo.  If ts is defined,
                 request only changes newer than timestamp ts."""
 
                 raise NotImplementedError
 
         def get_catalog1(self, filelist, destloc, header=None, ts=None,
-            progtrack=None, revalidate=False, redownload=False):
+            progtrack=None, pub=None, revalidate=False, redownload=False):
                 """Get the files that make up the catalog components
                 that are listed in 'filelist'.  Download the files to
                 the directory specified in 'destloc'.  The caller
@@ -77,13 +78,13 @@
 
                 raise NotImplementedError
 
-        def get_datastream(self, fhash, version, header=None, ccancel=None):
+        def get_datastream(self, fhash, version, header=None, ccancel=None, pub=None):
                 """Get a datastream from a repo.  The name of the
                 file is given in fhash."""
 
                 raise NotImplementedError
 
-        def get_files(self, filelist, dest, progtrack, version, header=None):
+        def get_files(self, filelist, dest, progtrack, version, header=None, pub=None):
                 """Get multiple files from the repo at once.
                 The files are named by hash and supplied in filelist.
                 If dest is specified, download to the destination
@@ -91,14 +92,14 @@
 
                 raise NotImplementedError
 
-        def get_manifest(self, fmri, header=None, ccancel=None):
+        def get_manifest(self, fmri, header=None, ccancel=None, pub=None):
                 """Get a manifest from repo.  The name of the
                 package is given in fmri.  If dest is set, download
                 the manifest to dest."""
 
                 raise NotImplementedError
 
-        def get_manifests(self, mfstlist, dest, progtrack=None):
+        def get_manifests(self, mfstlist, dest, progtrack=None, pub=None):
                 """Get manifests named in list.  The mfstlist argument contains
                 tuples (fmri, header).  This is so that each manifest may have
                 unique header information.  The destination directory is spec-
@@ -112,6 +113,11 @@
 
                 raise NotImplementedError
 
+        def get_status(self, header=None, ccancel=None):
+                """Get status from the repository."""
+
+                raise NotImplementedError
+
         def get_url(self):
                 """Return's the Repo's URL."""
 
@@ -142,12 +148,12 @@
 
                 raise NotImplementedError
 
-        def publish_close(self, header=None, trans_id=None, refresh_index=False,
+        def publish_close(self, header=None, trans_id=None,
             add_to_catalog=False):
                 """The close operation tells the Repository to commit
                 the transaction identified by trans_id.  The caller may
-                specify refresh_index and add_to_catalog, if needed.
-                This method returns a (publish-state, fmri) tuple."""
+                specify add_to_catalog, if needed.  This method returns a
+                (publish-state, fmri) tuple."""
 
                 raise NotImplementedError
 
@@ -159,17 +165,39 @@
 
                 raise NotImplementedError
 
-        def publish_append(self, header=None, client_release=None,
-            pkg_name=None):
+        def publish_rebuild(self, header=None, pub=None):
+                """Attempt to rebuild the package data and search data in the
+                repository."""
+
                 raise NotImplementedError
 
-        def publish_refresh_index(self, header=None):
-                """If the Repo points to a Repository that has a refresh-able
-                index, refresh the index."""
+        def publish_rebuild_indexes(self, header=None, pub=None):
+                """Attempt to rebuild the search data in the repository."""
+
+                raise NotImplementedError
+
+        def publish_rebuild_packages(self, header=None, pub=None):
+                """Attempt to rebuild the package data in the repository."""
 
                 raise NotImplementedError
 
-        def touch_manifest(self, fmri, header=None, ccancel=None):
+        def publish_refresh(self, header=None, pub=None):
+                """Attempt to refresh the package data and search data in the
+                repository."""
+
+                raise NotImplementedError
+
+        def publish_refresh_indexes(self, header=None, pub=None):
+                """Attempt to refresh the search data in the repository."""
+
+                raise NotImplementedError
+
+        def publish_refresh_packages(self, header=None, pub=None):
+                """Attempt to refresh the package data in the repository."""
+
+                raise NotImplementedError
+
+        def touch_manifest(self, fmri, header=None, ccancel=None, pub=None):
                 """Send data about operation intent without actually
                 downloading a manifest."""
 
@@ -211,7 +239,7 @@
                 msg = None
                 if not content:
                         return msg
-        
+
                 from xml.dom.minidom import Document, parse
                 dom = parse(cStringIO.StringIO(content))
                 msg = ""
@@ -301,46 +329,86 @@
                     sock_path=self._sock_path, data_fobj=data_fobj,
                     data_fp=data_fp, failonerror=failonerror)
 
+        def __check_response_body(self, fobj):
+                """Parse the response body found accessible using the provided
+                filestream object and raise an exception if appropriate."""
+
+                try:
+                        fobj.free_buffer = False
+                        fobj.read()
+                except tx.TransportProtoError, e:
+                        if e.code == httplib.BAD_REQUEST:
+                                exc_type, exc_value, exc_tb = sys.exc_info()
+                                try:
+                                        e.details = self._parse_html_error(
+                                            fobj.read())
+                                except:
+                                        # If parse fails, raise original
+                                        # exception.
+                                        raise exc_value, None, exc_tb
+                        raise
+                finally:
+                        fobj.close()
+
         def add_version_data(self, verdict):
                 """Cache the information about what versions a repository
                 supports."""
 
                 self._verdata = verdict
 
-        def do_search(self, data, header=None, ccancel=None):
+        def __get_request_url(self, methodstr, query=None, pub=None):
+                """Generate the request URL for the given method and
+                publisher.
+                """
+
+                base = self._repouri.uri
+
+                # Only append the publisher prefix if the publisher of the
+                # request is known, not already part of the URI, if this isn't
+                # an open operation, and if the repository supports version 1
+                # of the publisher opation.  The prefix shouldn't be appended
+                # for open because the publisher may not yet be known to the
+                # repository, and not in other cases because the repository
+                # doesn't support it.
+                pub_prefix = getattr(pub, "prefix", None)
+                if pub_prefix and not methodstr.startswith("open/") and \
+                    not base.endswith("/%s/" % pub_prefix) and \
+                    self.supports_version("publisher", [1]):
+                        # Append the publisher prefix to the repository URL.
+                        base = urlparse.urljoin(base, pub_prefix)
+
+                uri = urlparse.urljoin(base, methodstr)
+                if not query:
+                        return uri
+
+                # If a set of query data was provided, then decompose the URI
+                # into its component parts and replace the query portion with
+                # the encoded version of the new query data.
+                components = list(urlparse.urlparse(uri))
+                components[4] = urllib.urlencode(query)
+                return urlparse.urlunparse(components)
+
+        def do_search(self, data, header=None, ccancel=None, pub=None):
                 """Perform a remote search against origin repos."""
 
-                methodstr = "search/1/"
-
+                requesturl = self.__get_request_url("search/1/", pub=pub)
                 if len(data) > 1:
-                        requesturl = urlparse.urljoin(self._repouri.uri,
-                            methodstr)
+                        # Post and retrieve.
                         request_data = urllib.urlencode(
-                            [(i, str(q))
-                            for i, q in enumerate(data)])
-
-                        resp = self._post_url(requesturl, request_data,
+                            [(i, str(q)) for i, q in enumerate(data)])
+                        return self._post_url(requesturl, request_data,
                             header, ccancel=ccancel)
 
-                else:
-                        baseurl = urlparse.urljoin(self._repouri.uri,
-                            methodstr)
-                        requesturl = urlparse.urljoin(baseurl, urllib.quote(
-                            str(data[0]), safe=''))
+                # Retrieval only.
+                requesturl = urlparse.urljoin(requesturl, urllib.quote(
+                    str(data[0]), safe=''))
+                return self._fetch_url(requesturl, header, ccancel=ccancel)
 
-                        resp = self._fetch_url(requesturl, header,
-                            ccancel=ccancel)
-
-                return resp
-
-        def get_catalog(self, ts=None, header=None, ccancel=None):
+        def get_catalog(self, ts=None, header=None, ccancel=None, pub=None):
                 """Get the catalog from the repo.  If ts is defined,
                 request only changes newer than timestamp ts."""
 
-                methodstr = "catalog/0/"
-
-                requesturl = urlparse.urljoin(self._repouri.uri, methodstr)
-
+                requesturl = self.__get_request_url("catalog/0/", pub=pub)
                 if ts:
                         if not header:
                                 header = {"If-Modified-Since": ts}
@@ -351,7 +419,7 @@
                     ccancel=ccancel)
 
         def get_catalog1(self, filelist, destloc, header=None, ts=None,
-            progtrack=None, revalidate=False, redownload=False):
+            progtrack=None, pub=None, revalidate=False, redownload=False):
                 """Get the files that make up the catalog components
                 that are listed in 'filelist'.  Download the files to
                 the directory specified in 'destloc'.  The caller
@@ -365,7 +433,7 @@
                 uses http's no-cache header, while revalidate uses
                 max-age=0."""
 
-                methodstr = "catalog/1/"
+                baseurl = self.__get_request_url("catalog/1/", pub=pub)
                 urllist = []
                 progclass = None
                 headers = {}
@@ -387,9 +455,6 @@
                 if progtrack:
                         progclass = ProgressCallback
 
-                # create URL for requests
-                baseurl = urlparse.urljoin(self._repouri.uri, methodstr)
-
                 for f in filelist:
                         url = urlparse.urljoin(baseurl, f)
                         urllist.append(url)
@@ -428,49 +493,51 @@
 
                 return self._annotate_exceptions(errors)
 
-        def get_datastream(self, fhash, version, header=None, ccancel=None):
+        def get_datastream(self, fhash, version, header=None, ccancel=None,
+            pub=None):
                 """Get a datastream from a repo.  The name of the
                 file is given in fhash."""
 
-                methodstr = "file/%s/" % version
+                # The only versions this operation is compatible with.
+                assert version == 0 or version == 1
 
-                baseurl = urlparse.urljoin(self._repouri.uri, methodstr)
+                baseurl = self.__get_request_url("file/%s/" % version, pub=pub)
                 requesturl = urlparse.urljoin(baseurl, fhash)
-
                 return self._fetch_url(requesturl, header, ccancel=ccancel)
 
         def get_publisherinfo(self, header=None, ccancel=None):
-                """Get publisher/0 information from the repository."""
+                """Get publisher information from the repository."""
 
-                requesturl = urlparse.urljoin(self._repouri.uri, "publisher/0/")
+                requesturl = self.__get_request_url("publisher/0/")
                 return self._fetch_url(requesturl, header, ccancel=ccancel)
 
-        def get_manifest(self, fmri, header=None, ccancel=None):
+        def get_status(self, header=None, ccancel=None):
+                """Get status/0 information from the repository."""
+
+                requesturl = self.__get_request_url("status/0")
+                return self._fetch_url(requesturl, header, ccancel=ccancel)
+
+        def get_manifest(self, fmri, header=None, ccancel=None, pub=None):
                 """Get a package manifest from repo.  The FMRI of the
                 package is given in fmri."""
 
-                methodstr = "manifest/0/"
-
                 mfst = fmri.get_url_path()
-                baseurl = urlparse.urljoin(self._repouri.uri, methodstr)
+                baseurl = self.__get_request_url("manifest/0/", pub=pub)
                 requesturl = urlparse.urljoin(baseurl, mfst)
 
                 return self._fetch_url(requesturl, header, compress=True,
                     ccancel=ccancel)
 
-        def get_manifests(self, mfstlist, dest, progtrack=None):
+        def get_manifests(self, mfstlist, dest, progtrack=None, pub=None):
                 """Get manifests named in list.  The mfstlist argument contains
                 tuples (fmri, header).  This is so that each manifest may have
                 unique header information.  The destination directory is spec-
                 ified in the dest argument."""
 
-                methodstr = "manifest/0/"
+                baseurl = self.__get_request_url("manifest/0/", pub=pub)
                 urlmapping = {}
                 progclass = None
 
-                # create URL for requests
-                baseurl = urlparse.urljoin(self._repouri.uri, methodstr)
-
                 if progtrack:
                         progclass = ProgressCallback
 
@@ -515,7 +582,7 @@
 
                 return self._annotate_exceptions(errors, urlmapping)
 
-        def get_files(self, filelist, dest, progtrack, version, header=None):
+        def get_files(self, filelist, dest, progtrack, version, header=None, pub=None):
                 """Get multiple files from the repo at once.
                 The files are named by hash and supplied in filelist.
                 If dest is specified, download to the destination
@@ -523,16 +590,13 @@
                 it contains a ProgressTracker object for the
                 downloads."""
 
-                methodstr = "file/%s/" % version
+                baseurl = self.__get_request_url("file/%s/" % version, pub=pub)
                 urllist = []
                 progclass = None
 
                 if progtrack:
                         progclass = FileProgress
 
-                # create URL for requests
-                baseurl = urlparse.urljoin(self._repouri.uri, methodstr)
-
                 for f in filelist:
                         url = urlparse.urljoin(baseurl, f)
                         urllist.append(url)
@@ -580,7 +644,7 @@
                 """Query the repo for versions information.
                 Returns a fileobject."""
 
-                requesturl = urlparse.urljoin(self._repouri.uri, "versions/0/")
+                requesturl = self.__get_request_url("versions/0/")
                 return self._fetch_url(requesturl, header, ccancel=ccancel)
 
         def has_version_data(self):
@@ -597,9 +661,8 @@
                 attrs = action.attrs
                 data_fobj = None
                 data = None
-                methodstr = "add/0/"
 
-                baseurl = urlparse.urljoin(self._repouri.uri, methodstr)
+                baseurl = self.__get_request_url("add/0/")
                 request_str = "%s/%s" % (trans_id, action.name)
                 requesturl = urlparse.urljoin(baseurl, request_str)
 
@@ -618,23 +681,7 @@
 
                 fobj = self._post_url(requesturl, header=headers,
                     data_fobj=data_fobj, data=data, failonerror=False)
-
-                try:
-                        fobj.free_buffer = False
-                        fobj.read()
-                except tx.TransportProtoError, e:
-                        if e.code == httplib.BAD_REQUEST:
-                                exc_type, exc_value, exc_tb = sys.exc_info()
-                                try:
-                                        e.details = self._parse_html_error(
-                                            fobj.read())
-                                except:
-                                        # If parse fails, raise original
-                                        # exception.
-                                        raise exc_value, None, exc_tb
-                        raise
-                finally:
-                        fobj.close()
+                self.__check_response_body(fobj)
 
         def publish_add_file(self, pth, header=None, trans_id=None):
                 """The publish operation that adds content to a repository.
@@ -643,11 +690,8 @@
                 id in trans_id."""
 
                 attrs = {}
-                methodstr = "file/1/"
-
-                baseurl = urlparse.urljoin(self._repouri.uri, methodstr)
-                request_str = "%s" % trans_id
-                requesturl = urlparse.urljoin(baseurl, request_str)
+                baseurl = self.__get_request_url("file/1/")
+                requesturl = urlparse.urljoin(baseurl, trans_id)
 
                 headers = dict(
                     ("X-IPkg-SetAttr%s" % i, "%s=%s" % (k, attrs[k]))
@@ -658,22 +702,16 @@
                         headers.update(header)
 
                 fobj = self._post_url(requesturl, header=headers, data_fp=pth)
+                self.__check_response_body(fobj)
 
-                # Discard response body
-                fobj.read()
-                
         def publish_abandon(self, header=None, trans_id=None):
                 """The 'abandon' publication operation, that tells a
                 Repository to abort the current transaction.  The caller
                 must specify the transaction id in trans_id. Returns
                 a (publish-state, fmri) tuple."""
 
-                methodstr = "abandon/0/"
-
-                baseurl = urlparse.urljoin(self._repouri.uri, methodstr)
-                request_str = trans_id
-                requesturl = urlparse.urljoin(baseurl, request_str)
-
+                baseurl = self.__get_request_url("abandon/0/")
+                requesturl = urlparse.urljoin(baseurl, trans_id)
                 fobj = self._fetch_url(requesturl, header=header,
                     failonerror=False)
 
@@ -698,25 +736,21 @@
 
                 return state, pkgfmri
 
-        def publish_close(self, header=None, trans_id=None, refresh_index=False,
+        def publish_close(self, header=None, trans_id=None,
             add_to_catalog=False):
                 """The close operation tells the Repository to commit
                 the transaction identified by trans_id.  The caller may
-                specify refresh_index and add_to_catalog, if needed.
-                This method returns a (publish-state, fmri) tuple."""
+                specify add_to_catalog, if needed.  This method returns a
+                (publish-state, fmri) tuple."""
 
-                methodstr = "close/0/"
                 headers = {}
-                if not refresh_index:
-                        headers["X-IPkg-Refresh-Index"] = 0
                 if not add_to_catalog:
                         headers["X-IPkg-Add-To-Catalog"] = 0
                 if header:
                         headers.update(header)
 
-                baseurl = urlparse.urljoin(self._repouri.uri, methodstr)
-                request_str = trans_id
-                requesturl = urlparse.urljoin(baseurl, request_str)
+                baseurl = self.__get_request_url("close/0/")
+                requesturl = urlparse.urljoin(baseurl, trans_id)
 
                 fobj = self._fetch_url(requesturl, header=headers,
                     failonerror=False)
@@ -749,14 +783,13 @@
                 client_release, and the package's name in pkg_name.
                 Returns a transaction-ID."""
 
-                methodstr = "open/0/"
-                return self.__start_trans(methodstr, header, client_release,
+                baseurl = self.__get_request_url("open/0/")
+                return self.__start_trans(baseurl, header, client_release,
                     pkg_name)
 
-        def __start_trans(self, methodstr, header, client_release, pkg_name):
+        def __start_trans(self, baseurl, header, client_release, pkg_name):
                 """Start a publication transaction."""
 
-                baseurl = urlparse.urljoin(self._repouri.uri, methodstr)
                 request_str = urllib.quote(pkg_name, "")
                 requesturl = urlparse.urljoin(baseurl, request_str)
 
@@ -794,37 +827,79 @@
                 client_release, and the package's name in pkg_name.
                 Returns a transaction-ID."""
 
-                methodstr = "append/0/"
-                return self.__start_trans(methodstr, header, client_release,
+                baseurl = self.__get_request_url("append/0/")
+                return self.__start_trans(baseurl, header, client_release,
                     pkg_name)
 
         def publish_refresh_index(self, header=None):
                 """If the Repo points to a Repository that has a refresh-able
                 index, refresh the index."""
 
-                methodstr = "index/0/refresh/"
-                requesturl = urlparse.urljoin(self._repouri.uri, methodstr)
+                # Former body referenced an undefined name ('pub') and
+                # sent the wrong admin command ('rebuild', copy-pasted
+                # from publish_rebuild).  Delegate to
+                # publish_refresh_indexes, which also falls back to
+                return self.publish_refresh_indexes(header=header)
+
+        def publish_rebuild(self, header=None, pub=None):
+                """Attempt to rebuild the package data and search data in the
+                repository."""
+
+                requesturl = self.__get_request_url("admin/0", query={
+                    "cmd": "rebuild" }, pub=pub)
+                fobj = self._fetch_url(requesturl, header=header,
+                    failonerror=False)
+                self.__check_response_body(fobj)
+
+        def publish_rebuild_indexes(self, header=None, pub=None):
+                """Attempt to rebuild the search data in the repository."""
+
+                requesturl = self.__get_request_url("admin/0", query={
+                    "cmd": "rebuild-indexes" }, pub=pub)
+                fobj = self._fetch_url(requesturl, header=header,
+                    failonerror=False)
+                self.__check_response_body(fobj)
+
+        def publish_rebuild_packages(self, header=None, pub=None):
+                """Attempt to rebuild the package data in the repository."""
+
+                requesturl = self.__get_request_url("admin/0", query={
+                    "cmd": "rebuild-packages" }, pub=pub)
+                fobj = self._fetch_url(requesturl, header=header,
+                    failonerror=False)
+                self.__check_response_body(fobj)
+
+        def publish_refresh(self, header=None, pub=None):
+                """Attempt to refresh the package data and search data in the
+                repository."""
+
+                requesturl = self.__get_request_url("admin/0", query={
+                    "cmd": "refresh" }, pub=pub)
+                fobj = self._fetch_url(requesturl, header=header,
+                    failonerror=False)
+                self.__check_response_body(fobj)
+
+        def publish_refresh_indexes(self, header=None, pub=None):
+                """Attempt to refresh the search data in the repository."""
+
+                if self.supports_version("admin", [0]):
+                        requesturl = self.__get_request_url("admin/0", query={
+                            "cmd": "refresh-indexes" }, pub=pub)
+                else:
+                        requesturl = self.__get_request_url("index/0/refresh")
 
                 fobj = self._fetch_url(requesturl, header=header,
                     failonerror=False)
+                self.__check_response_body(fobj)
 
-                try:
-                        fobj.free_buffer = False
-                        fobj.read()
-                except tx.TransportProtoError, e:
-                        if e.code == httplib.BAD_REQUEST:
-                                exc_type, exc_value, exc_tb = sys.exc_info()
-                                try:
-                                        e.details = self._parse_html_error(
-                                            fobj.read())
-                                except:
-                                        # If parse fails, raise original
-                                        # exception.
-                                        raise exc_value, None, exc_tb
+        def publish_refresh_packages(self, header=None, pub=None):
+                """Attempt to refresh the package data in the repository."""
 
-                        raise
-                finally:
-                        fobj.close()
+                requesturl = self.__get_request_url("admin/0", query={
+                    "cmd": "refresh-packages" }, pub=pub)
+                fobj = self._fetch_url(requesturl, header=header,
+                    failonerror=False)
+                self.__check_response_body(fobj)
 
         def supports_version(self, op, verlist):
                 """Returns version-id of highest supported version.
@@ -843,12 +918,10 @@
                                 return v
                 return -1
 
-        def touch_manifest(self, mfst, header=None, ccancel=None):
+        def touch_manifest(self, mfst, header=None, ccancel=None, pub=None):
                 """Invoke HTTP HEAD to send manifest intent data."""
 
-                methodstr = "manifest/0/"
-
-                baseurl = urlparse.urljoin(self._repouri.uri, methodstr)
+                baseurl = self.__get_request_url("manifest/0/", pub=pub)
                 requesturl = urlparse.urljoin(baseurl, mfst)
 
                 resp = self._fetch_url_header(requesturl, header,
@@ -859,6 +932,7 @@
 
                 return True
 
+
 class HTTPSRepo(HTTPRepo):
 
         def __init__(self, repostats, repouri, engine):
@@ -936,7 +1010,7 @@
                             allow_fragments=0)
                         path = urllib.url2pathname(path)
                         self._frepo = svr_repo.Repository(read_only=True,
-                            repo_root=path)
+                            root=path)
                 except cfg.ConfigError, e:
                         reason = _("The configuration file for the repository "
                             "is invalid or incomplete:\n%s") % e
@@ -958,7 +1032,7 @@
         def __del__(self):
                 # Dump search cache if repo goes out of scope.
                 if self._frepo:
-                        sqp.TermQuery.clear_cache(self._frepo.index_root)
+                        self._frepo.reset_search()
                         self._frepo = None
 
         def _add_file_url(self, url, filepath=None, progclass=None,
@@ -993,18 +1067,18 @@
 
                 self._verdata = verdict
 
-        def do_search(self, data, header=None, ccancel=None):
+        def do_search(self, data, header=None, ccancel=None, pub=None):
                 """Perform a search against repo."""
 
-                if not self._frepo.search_available:
+                pub_prefix = getattr(pub, "prefix", None)
+                try:
+                        res_list = self._frepo.search(data, pub=pub_prefix)
+                except svr_repo.RepositorySearchUnavailableError:
                         ex = tx.TransportProtoError("file", errno.EAGAIN,
                             reason=_("Search temporarily unavailable."),
                             repourl=self._url)
                         self.__record_proto_error(ex)
                         raise ex
-
-                try:
-                        res_list = self._frepo.search(data)
                 except sqp.QueryException, e:
                         ex = tx.TransportProtoError("file", errno.EINVAL,
                             reason=str(e), repourl=self._url)
@@ -1052,7 +1126,7 @@
                 return output()
 
         def get_catalog1(self, filelist, destloc, header=None, ts=None,
-            progtrack=None, revalidate=False, redownload=False):
+            progtrack=None, pub=None, revalidate=False, redownload=False):
                 """Get the files that make up the catalog components
                 that are listed in 'filelist'.  Download the files to
                 the directory specified in 'destloc'.  The caller
@@ -1065,14 +1139,24 @@
 
                 urllist = []
                 progclass = None
+                pub_prefix = getattr(pub, "prefix", None)
+
                 if progtrack:
                         progclass = ProgressCallback
 
                 # create URL for requests
                 for f in filelist:
-                        url = urlparse.urlunparse(("file", "", 
-                            urllib.pathname2url(self._frepo.catalog_1(f)), "",
-                            "", ""))
+                        try:
+                                url = urlparse.urlunparse(("file", None,
+                                    urllib.pathname2url(self._frepo.catalog_1(f,
+                                    pub=pub_prefix)), None, None, None))
+                        except svr_repo.RepositoryError, e:
+                                ex = tx.TransportProtoError("file",
+                                    errno.EPROTO, reason=str(e),
+                                    repourl=self._url, request=f)
+                                self.__record_proto_error(ex)
+                                raise ex
+
                         urllist.append(url)
                         fn = os.path.join(destloc, f)
                         self._add_file_url(url, filepath=fn, header=header,
@@ -1108,14 +1192,15 @@
 
                 return self._annotate_exceptions(errors)
 
-        def get_datastream(self, fhash, version, header=None, ccancel=None):
+        def get_datastream(self, fhash, version, header=None, ccancel=None, pub=None):
                 """Get a datastream from a repo.  The name of the
                 file is given in fhash."""
 
+                pub_prefix = getattr(pub, "prefix", None)
                 try:
-                        requesturl = urlparse.urlunparse(("file", "", 
-                            urllib.pathname2url(self._frepo.file(fhash)), "",
-                            "", ""))
+                        requesturl = urlparse.urlunparse(("file", None,
+                            urllib.pathname2url(self._frepo.file(fhash,
+                            pub=pub_prefix)), None, None, None))
                 except svr_repo.RepositoryFileNotFoundError, e:
                         ex = tx.TransportProtoError("file", errno.ENOENT,
                             reason=str(e), repourl=self._url, request=fhash)
@@ -1129,32 +1214,12 @@
                 return self._fetch_url(requesturl, header, ccancel=ccancel)
 
         def get_publisherinfo(self, header=None, ccancel=None):
-                """Get publisher/0 information from the repository."""
+                """Get publisher information from the repository."""
 
                 try:
-                        rargs = {}
-                        for prop in ("collection_type", "description",
-                            "legal_uris", "mirrors", "name", "origins",
-                            "refresh_seconds", "registration_uri",
-                            "related_uris"):
-                                rargs[prop] = self._frepo.cfg.get_property(
-                                    "repository", prop)
-
-                        repo = publisher.Repository(**rargs)
-                        alias = self._frepo.cfg.get_property("publisher",
-                            "alias")
-                        pfx = self._frepo.cfg.get_property("publisher",
-                            "prefix")
-                        scas = self._frepo.cfg.get_property("publisher",
-                            "signing_ca_certs")
-                        icas = self._frepo.cfg.get_property("publisher",
-                            "intermediate_certs")
-                        pub = publisher.Publisher(pfx, alias=alias,
-                            repositories=[repo], ca_certs=scas,
-                            inter_certs=icas)
-
+                        pubs = self._frepo.get_publishers()
                         buf = cStringIO.StringIO()
-                        p5i.write(buf, [pub])
+                        p5i.write(buf, pubs)
                 except Exception, e:
                         reason = "Unable to retrieve publisher configuration " \
                             "data:\n%s" % e
@@ -1165,14 +1230,33 @@
                 buf.seek(0)
                 return buf
 
-        def get_manifest(self, fmri, header=None, ccancel=None):
+        def get_status(self, header=None, ccancel=None):
+                """Get status/0 information from the repository."""
+
+                buf = cStringIO.StringIO()
+                try:
+                        rstatus = self._frepo.get_status()
+                        json.dump(rstatus, buf, ensure_ascii=False, indent=2,
+                            sort_keys=True)
+                        buf.write("\n")
+                except Exception, e:
+                        reason = "Unable to retrieve status data:\n%s" % e
+                        ex = tx.TransportProtoError("file", errno.EPROTO,
+                            reason=reason, repourl=self._url)
+                        self.__record_proto_error(ex)
+                        raise ex
+                buf.seek(0)
+                return buf
+
+        def get_manifest(self, fmri, header=None, ccancel=None, pub=None):
                 """Get a manifest from repo.  The fmri of the package for the
                 manifest is given in fmri."""
 
+                pub_prefix = getattr(pub, "prefix", None)
                 try:
-                        requesturl = urlparse.urlunparse(("file", "", 
-                            urllib.pathname2url(self._frepo.manifest(fmri)), "",
-                            "", ""))
+                        requesturl = urlparse.urlunparse(("file", None,
+                            urllib.pathname2url(self._frepo.manifest(fmri,
+                            pub=pub_prefix)), None, None, None))
                 except svr_repo.RepositoryError, e:
                         ex = tx.TransportProtoError("file", errno.EPROTO,
                             reason=str(e), repourl=self._url, request=str(fmri))
@@ -1181,7 +1265,7 @@
 
                 return self._fetch_url(requesturl, header, ccancel=ccancel)
 
-        def get_manifests(self, mfstlist, dest, progtrack=None):
+        def get_manifests(self, mfstlist, dest, progtrack=None, pub=None):
                 """Get manifests named in list.  The mfstlist argument contains
                 tuples (fmri, header).  This is so that each manifest may have
                 unique header information.  The destination directory is spec-
@@ -1189,6 +1273,7 @@
 
                 urlmapping = {}
                 progclass = None
+                pub_prefix = getattr(pub, "prefix", None)
 
                 if progtrack:
                         progclass = ProgressCallback
@@ -1199,9 +1284,9 @@
                 pre_exec_errors = []
                 for fmri, h in mfstlist:
                         try:
-                                url = urlparse.urlunparse(("file", "", 
+                                url = urlparse.urlunparse(("file", None,
                                     urllib.pathname2url(self._frepo.manifest(
-                                    fmri)), "", "", ""))
+                                    fmri, pub=pub_prefix)), None, None, None))
                         except svr_repo.RepositoryError, e:
                                 ex = tx.TransportProtoError("file",
                                     errno.EPROTO, reason=str(e),
@@ -1248,7 +1333,7 @@
 
                 return errors + pre_exec_errors
 
-        def get_files(self, filelist, dest, progtrack, version, header=None):
+        def get_files(self, filelist, dest, progtrack, version, header=None, pub=None):
                 """Get multiple files from the repo at once.
                 The files are named by hash and supplied in filelist.
                 If dest is specified, download to the destination
@@ -1258,6 +1343,7 @@
 
                 urllist = []
                 progclass = None
+                pub_prefix = getattr(pub, "prefix", None)
 
                 if progtrack:
                         progclass = FileProgress
@@ -1268,9 +1354,9 @@
                 pre_exec_errors = []
                 for f in filelist:
                         try:
-                                url = urlparse.urlunparse(("file", "", 
-                                    urllib.pathname2url(self._frepo.file(f)),
-                                    "", "", ""))
+                                url = urlparse.urlunparse(("file", None,
+                                    urllib.pathname2url(self._frepo.file(f,
+                                    pub=pub_prefix)), None, None, None))
                         except svr_repo.RepositoryFileNotFoundError, e:
                                 ex = tx.TransportProtoError("file",
                                     errno.ENOENT, reason=str(e),
@@ -1334,12 +1420,18 @@
 
                 buf = cStringIO.StringIO()
                 vops = {
+                    "abandon": ["0"],
+                    "add": ["0"],
+                    "admin": ["0"],
                     "append": ["0"],
                     "catalog": ["1"],
+                    "close": ["0"],
                     "file": ["0", "1"],
                     "manifest": ["0"],
-                    "publisher": ["0"],
+                    "open": ["0"],
+                    "publisher": ["0", "1"],
                     "search": ["1"],
+                    "status": ["0"],
                     "versions": ["0"],
                 }
 
@@ -1364,6 +1456,9 @@
                 Callers may supply a header, and should supply a transaction
                 id in trans_id."""
 
+                # Calling any publication operation sets read_only to False.
+                self._frepo.read_only = False
+
                 try:
                         self._frepo.add(trans_id, action)
                 except svr_repo.RepositoryError, e:
@@ -1384,6 +1479,9 @@
                 transaction id in trans_id. Returns a (publish-state, fmri)
                 tuple."""
 
+                # Calling any publication operation sets read_only to False.
+                self._frepo.read_only = False
+
                 try:
                         pkg_state = self._frepo.abandon(trans_id)
                 except svr_repo.RepositoryError, e:
@@ -1391,16 +1489,18 @@
 
                 return None, pkg_state
 
-        def publish_close(self, header=None, trans_id=None, refresh_index=False,
+        def publish_close(self, header=None, trans_id=None,
             add_to_catalog=False):
                 """The close operation tells the Repository to commit
                 the transaction identified by trans_id.  The caller may
-                specify refresh_index and add_to_catalog, if needed.
-                This method returns a (publish-state, fmri) tuple."""
+                specify add_to_catalog, if needed.  This method returns a
+                (publish-state, fmri) tuple."""
+
+                # Calling any publication operation sets read_only to False.
+                self._frepo.read_only = False
 
                 try:
                         pkg_fmri, pkg_state = self._frepo.close(trans_id,
-                            refresh_index=refresh_index,
                             add_to_catalog=add_to_catalog)
                 except svr_repo.RepositoryError, e:
                         raise tx.TransportOperationError(str(e))
@@ -1413,6 +1513,9 @@
                 client_release, and the package's name in pkg_name.
                 Returns a transaction-ID string."""
 
+                # Calling any publication operation sets read_only to False.
+                self._frepo.read_only = False
+
                 try:
                         trans_id = self._frepo.open(client_release, pkg_name)
                 except svr_repo.RepositoryError, e:
@@ -1429,15 +1532,84 @@
 
                 return trans_id
 
-        def publish_refresh_index(self, header=None):
-                """If the Repo points to a Repository that has a refresh-able
-                index, refresh the index."""
+        def publish_rebuild(self, header=None, pub=None):
+                """Attempt to rebuild the package data and search data in the
+                repository."""
+
+                # Calling any publication operation sets read_only to False.
+                self._frepo.read_only = False
+
+                pub_prefix = getattr(pub, "prefix", None)
+                try:
+                        self._frepo.rebuild(pub=pub_prefix,
+                            build_catalog=True, build_index=True)
+                except svr_repo.RepositoryError, e:
+                        raise tx.TransportOperationError(str(e))
+
+        def publish_rebuild_indexes(self, header=None, pub=None):
+                """Attempt to rebuild the search data in the repository."""
+
+                # Calling any publication operation sets read_only to False.
+                self._frepo.read_only = False
+
+                pub_prefix = getattr(pub, "prefix", None)
+                try:
+                        self._frepo.rebuild(pub=pub_prefix,
+                            build_catalog=False, build_index=True)
+                except svr_repo.RepositoryError, e:
+                        raise tx.TransportOperationError(str(e))
+
+        def publish_rebuild_packages(self, header=None, pub=None):
+                """Attempt to rebuild the package data in the repository."""
+
+                # Calling any publication operation sets read_only to False.
+                self._frepo.read_only = False
+
+                pub_prefix = getattr(pub, "prefix", None)
+                try:
+                        self._frepo.rebuild(pub=pub_prefix,
+                            build_catalog=True, build_index=False)
+                except svr_repo.RepositoryError, e:
+                        raise tx.TransportOperationError(str(e))
+
+        def publish_refresh(self, header=None, pub=None):
+                """Attempt to refresh the package data and search data in the
+                repository."""
+
+                # Calling any publication operation sets read_only to False.
+                self._frepo.read_only = False
+
+                pub_prefix = getattr(pub, "prefix", None)
+                try:
+                        self._frepo.add_content(pub=pub_prefix,
+                            refresh_index=True)
+                except svr_repo.RepositoryError, e:
+                        raise tx.TransportOperationError(str(e))
+
+        def publish_refresh_indexes(self, header=None, pub=None):
+                """Attempt to refresh the search data in the repository."""
+
+                # Calling any publication operation sets read_only to False.
+                self._frepo.read_only = False
 
                 try:
                         self._frepo.refresh_index()
                 except svr_repo.RepositoryError, e:
                         raise tx.TransportOperationError(str(e))
 
+        def publish_refresh_packages(self, header=None, pub=None):
+                """Attempt to refresh the package data in the repository."""
+
+                # Calling any publication operation sets read_only to False.
+                self._frepo.read_only = False
+
+                pub_prefix = getattr(pub, "prefix", None)
+                try:
+                        self._frepo.add_content(pub=pub_prefix,
+                            refresh_index=False)
+                except svr_repo.RepositoryError, e:
+                        raise tx.TransportOperationError(str(e))
+
         def supports_version(self, op, verlist):
                 """Returns version-id of highest supported version.
                 If the version is not supported, or no data is available,
@@ -1455,7 +1627,7 @@
                                 return v
                 return -1
 
-        def touch_manifest(self, mfst, header=None, ccancel=None):
+        def touch_manifest(self, mfst, header=None, ccancel=None, pub=None):
                 """No-op for file://."""
 
                 return True
--- a/src/modules/client/transport/transport.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/client/transport/transport.py	Thu Aug 19 23:33:49 2010 -0700
@@ -28,6 +28,7 @@
 import errno
 import httplib
 import os
+import simplejson as json
 import statvfs
 import tempfile
 import urllib
@@ -470,7 +471,7 @@
 
                         try:
                                 fobj = d.do_search(data, header,
-                                    ccancel=ccancel)
+                                    ccancel=ccancel, pub=pub)
                                 if hasattr(fobj, "_prime"):
                                         fobj._prime()
 
@@ -568,7 +569,7 @@
                         try:
 
                                 resp = d.get_catalog(ts, header,
-                                    ccancel=ccancel)
+                                    ccancel=ccancel, pub=pub)
 
                                 updatelog.recv(resp, croot, ts, pub)
 
@@ -661,7 +662,6 @@
 
                 retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
                 failures = []
-                repo_found = False
                 header = self.__build_header(uuid=self.__get_uuid(pub))
 
                 if progtrack and ccancel:
@@ -720,7 +720,6 @@
 
                         failedreqs = []
                         repostats = self.stats[d.get_url()]
-                        repo_found = True
                         gave_up = False
 
                         # This returns a list of transient errors
@@ -729,7 +728,7 @@
                         # unless we want to supress a permanent failure.
                         try:
                                 errlist = d.get_catalog1(flist, download_dir,
-                                    header, ts, progtrack=progtrack,
+                                    header, ts, progtrack=progtrack, pub=pub,
                                     redownload=redownload,
                                     revalidate=revalidate)
                         except tx.ExcessiveTransientFailure, ex:
@@ -802,10 +801,6 @@
                         if not flist and not errlist:
                                 return
 
-                if not repo_found:
-                        raise apx.UnsupportedRepositoryOperation(pub,
-                            "catalog/1")
-
                 if failedreqs and failures:
                         failures = [
                             x for x in failures
@@ -825,19 +820,13 @@
 
                 retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
                 failures = tx.TransportFailures()
-                repo_found = False
                 header = None
 
                 if isinstance(pub, publisher.Publisher):
                         header = self.__build_header(uuid=self.__get_uuid(pub))
 
-                # Call setup if the transport isn't configured or was shutdown.
-                if not self.__engine:
-                        self.__setup()
-
                 for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
                     operation="publisher", versions=[0], ccancel=ccancel):
-                        repo_found = True
                         try:
                                 resp = d.get_publisherinfo(header,
                                     ccancel=ccancel)
@@ -870,9 +859,6 @@
                                 else:
                                         raise
 
-                if not repo_found:
-                        raise apx.UnsupportedRepositoryOperation(pub,
-                            "publisher/0")
                 raise failures
 
         @LockedTransport()
@@ -886,10 +872,6 @@
                 failures = tx.TransportFailures()
                 header = self.__build_header(uuid=self.__get_uuid(pub))
 
-                # Call setup if the transport isn't configured or was shutdown.
-                if not self.__engine:
-                        self.__setup()
-
                 for d, v in self.__gen_repo(pub, retry_count, operation="file",
                     versions=[0, 1]):
 
@@ -897,7 +879,7 @@
 
                         try:
                                 resp = d.get_datastream(fhash, v, header,
-                                    ccancel=ccancel)
+                                    ccancel=ccancel, pub=pub)
                                 s = cStringIO.StringIO()
                                 hash_val = misc.gunzip_from_stream(resp, s)
                                 content = s.getvalue()
@@ -930,6 +912,51 @@
                 raise failures
 
         @LockedTransport()
+        def get_status(self, pub, ccancel=None):
+                """Given a publisher pub, return the stats information
+                for the repository as a dictionary."""
+
+                retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
+                failures = tx.TransportFailures()
+                header = None
+
+                if isinstance(pub, publisher.Publisher):
+                        header = self.__build_header(uuid=self.__get_uuid(pub))
+
+                for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
+                    operation="status", versions=[0], ccancel=ccancel):
+                        try:
+                                resp = d.get_status(header, ccancel=ccancel)
+                                infostr = resp.read()
+
+                                # If parse succeeds, then the data is valid.
+                                return dict(json.loads(infostr))
+                        except tx.ExcessiveTransientFailure, e:
+                                # If an endpoint experienced so many failures
+                                # that we just gave up, grab the list of
+                                # failures that it contains
+                                failures.extend(e.failures)
+
+                        except (TypeError, ValueError), e:
+                                url = d.get_url()
+                                exc = tx.TransferContentException(url,
+                                    "Invalid stats response: %s" % e)
+                                repostats = self.stats[url]
+                                repostats.record_error(content=True)
+                                if exc.retryable:
+                                        failures.append(exc)
+                                else:
+                                        raise exc
+
+                        except tx.TransportException, e:
+                                if e.retryable:
+                                        failures.append(e)
+                                else:
+                                        raise
+
+                raise failures
+
+        @LockedTransport()
         def touch_manifest(self, fmri, intent=None, ccancel=None):
                 """Touch a manifest.  This operation does not
                 return the manifest's content.  The FMRI is given
@@ -944,17 +971,14 @@
                 header = self.__build_header(intent=intent,
                     uuid=self.__get_uuid(pub))
 
-                # Call setup if the transport isn't configured or was shutdown.
-                if not self.__engine:
-                        self.__setup()
-
                 for d in self.__gen_repo(pub, retry_count, origin_only=True):
 
                         # If a transport exception occurs,
                         # save it if it's retryable, otherwise
                         # raise the error to a higher-level handler.
                         try:
-                                d.touch_manifest(mfst, header, ccancel=ccancel)
+                                d.touch_manifest(mfst, header, ccancel=ccancel,
+                                    pub=pub)
                                 return
 
                         except tx.ExcessiveTransientFailure, ex:
@@ -1009,7 +1033,7 @@
                         verified = False
                         try:
                                 resp = d.get_manifest(fmri, header,
-                                    ccancel=ccancel)
+                                    ccancel=ccancel, pub=pub)
                                 mcontent = resp.read()
 
                                 verified = self._verify_manifest(fmri,
@@ -1160,7 +1184,7 @@
                         # unless we want to suppress a permanant failure.
                         try:
                                 errlist = d.get_manifests(mfstlist,
-                                    download_dir, progtrack=progtrack)
+                                    download_dir, progtrack=progtrack, pub=pub)
                         except tx.ExcessiveTransientFailure, ex:
                                 # If an endpoint experienced so many failures
                                 # that we just gave up, record this for later
@@ -1399,7 +1423,7 @@
                         # unless we want to supress a permanant failure.
                         try:
                                 errlist = d.get_files(filelist, download_dir,
-                                    progtrack, v, header)
+                                    progtrack, v, header, pub=pub)
                         except tx.ExcessiveTransientFailure, ex:
                                 # If an endpoint experienced so many failures
                                 # that we just gave up, record this for later
@@ -1683,14 +1707,13 @@
                 versions and operation are specified, it returns a tuple
                 of (Repo, highest supported version)."""
 
-                repo = None
-                
                 if not self.__engine:
                         self.__setup()
 
                 # If alt_repository supplied, use that as the Repository.
                 # Otherwise, check that a Publisher was passed, and use
                 # its selected_repository.
+                repo = None
                 if alt_repository:
                         repo = alt_repository
                 elif isinstance(pub, publisher.Publisher):
@@ -1724,11 +1747,14 @@
                 if versions:
                         versions = sorted(versions, reverse=True)
 
+                fail = None
                 for i in xrange(count):
                         rslist = self.stats.get_repostats(repolist, origins)
                         if prefer_remote:
                                 rslist.sort(cmp=remote_first)
 
+                        fail = tx.TransportFailures()
+                        repo_found = False
                         for rs, ruri in rslist:
                                 if operation and versions:
                                         repo = self.__repo_cache.new_repo(rs,
@@ -1738,17 +1764,43 @@
                                                         self.__fill_repo_vers(
                                                             repo,
                                                             ccancel=ccancel)
-                                                except tx.TransportException:
+                                                except tx.TransportException, ex:
+                                                        # Encountered a
+                                                        # transport error while
+                                                        # trying to contact this
+                                                        # origin.  Save the
+                                                        # errors on each retry
+                                                        # so that they can be
+                                                        # raised instead of
+                                                        # an unsupported
+                                                        # operation error.
+                                                        if isinstance(ex,
+                                                            tx.TransportFailures):
+                                                                fail.extend(
+                                                                    ex.exceptions)
+                                                        else:
+                                                                fail.append(ex)
                                                         continue
 
                                         verid = repo.supports_version(operation,
                                             versions)
                                         if verid >= 0:
+                                                repo_found = True
                                                 yield repo, verid
                                 else:
+                                        repo_found = True
                                         yield self.__repo_cache.new_repo(rs,
                                             ruri)
 
+                        if not repo_found and fail:
+                                raise fail
+                        if not repo_found and operation and versions:
+                                # If a versioned operation was requested and
+                                # wasn't found, then raise an unsupported
+                                # exception using the newest version allowed.
+                                raise apx.UnsupportedRepositoryOperation(pub,
+                                    "%s/%d" % (operation, versions[-1]))
+
         def __chunk_size(self, pub, origin_only=False):
                 """Determine the chunk size based upon how many of the known
                 mirrors have been visited.  If not all mirrors have been
@@ -2009,12 +2061,8 @@
                 retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
                 header = self.__build_header(uuid=self.__get_uuid(pub))
 
-                # Call setup if the transport isn't configured or was shutdown.
-                if not self.__engine:
-                        self.__setup()
-
-                for d in self.__gen_repo(pub, retry_count, origin_only=True,
-                    single_repository=True):
+                for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
+                    single_repository=True, operation="add", versions=[0]):
                         try:
                                 d.publish_add(action, header=header,
                                     trans_id=trans_id)
@@ -2046,10 +2094,8 @@
                 if not self.__engine:
                         self.__setup()
 
-                repo_found = False
                 for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
                     single_repository=True, operation="file", versions=[1]):
-                        repo_found = True
                         try:
                                 d.publish_add_file(pth, header=header,
                                     trans_id=trans_id)
@@ -2064,9 +2110,6 @@
                                         failures.append(e)
                                 else:
                                         raise
-                if not repo_found:
-                        raise apx.UnsupportedRepositoryOperation(pub,
-                            "file/1")
 
                 raise failures
 
@@ -2080,12 +2123,8 @@
                 retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
                 header = self.__build_header(uuid=self.__get_uuid(pub))
 
-                # Call setup if transport isn't configured, or was shutdown.
-                if not self.__engine:
-                        self.__setup()
-
-                for d in self.__gen_repo(pub, retry_count, origin_only=True,
-                    single_repository=True):
+                for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
+                    single_repository=True, operation="abandon", versions=[0]):
                         try:
                                 state, fmri = d.publish_abandon(header=header,
                                     trans_id=trans_id)
@@ -2108,9 +2147,7 @@
             add_to_catalog=False):
                 """Perform a 'close' publication operation to the
                 publisher supplied in the pub argument.  The caller should
-                also include the transaction id in trans_id.  If
-                the refresh_index argument is true, the repository
-                will be told to refresh its index.  If add_to_catalog
+                also include the transaction id in trans_id.  If add_to_catalog
                 is true, the pkg will be added to the catalog once
                 the transactions close.  Not all transport methods
                 recognize this parameter."""
@@ -2119,16 +2156,11 @@
                 retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
                 header = self.__build_header(uuid=self.__get_uuid(pub))
 
-                # Call setup if transport isn't configured, or was shutdown.
-                if not self.__engine:
-                        self.__setup()
-
-                for d in self.__gen_repo(pub, retry_count, origin_only=True,
-                    single_repository=True):
+                for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
+                    single_repository=True, operation="close", versions=[0]):
                         try:
                                 state, fmri = d.publish_close(header=header,
                                     trans_id=trans_id,
-                                    refresh_index=refresh_index,
                                     add_to_catalog=add_to_catalog)
                                 return state, fmri
                         except tx.ExcessiveTransientFailure, ex:
@@ -2155,12 +2187,8 @@
                 retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
                 header = self.__build_header(uuid=self.__get_uuid(pub))
 
-                # Call setup if transport isn't configured, or was shutdown.
-                if not self.__engine:
-                        self.__setup()
-
-                for d in self.__gen_repo(pub, retry_count, origin_only=True,
-                    single_repository=True):
+                for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
+                    single_repository=True, operation="open", versions=[0]):
                         try:
                                 trans_id = d.publish_open(header=header,
                                     client_release=client_release,
@@ -2176,6 +2204,32 @@
                                         failures.append(e)
                                 else:
                                         raise
+                raise failures
+
+        @LockedTransport()
+        def publish_rebuild(self, pub):
+                """Instructs the repositories named by Publisher pub
+                to rebuild package and search data."""
+
+                failures = tx.TransportFailures()
+                retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
+                header = self.__build_header(uuid=self.__get_uuid(pub))
+
+                for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
+                    single_repository=True, operation="admin", versions=[0]):
+                        try:
+                                d.publish_rebuild(header=header, pub=pub)
+                                return
+                        except tx.ExcessiveTransientFailure, ex:
+                                # If an endpoint experienced so many failures
+                                # that we just gave up, grab the list of
+                                # failures that it contains
+                                failures.extend(ex.failures)
+                        except tx.TransportException, e:
+                                if e.retryable:
+                                        failures.append(e)
+                                else:
+                                        raise
 
                 raise failures
 
@@ -2194,10 +2248,8 @@
                 if not self.__engine:
                         self.__setup()
 
-                repo_found = False
                 for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
                     single_repository=True, operation="append", versions=[0]):
-                        repo_found = True
                         try:
                                 trans_id = d.publish_append(header=header,
                                     client_release=client_release,
@@ -2213,29 +2265,138 @@
                                         failures.append(e)
                                 else:
                                         raise
-                if not repo_found:
-                        raise apx.UnsupportedRepositoryOperation(pub,
-                            "append/0")
+
+                raise failures
+
+        @LockedTransport()
+        def publish_rebuild_indexes(self, pub):
+                """Instructs the repositories named by Publisher pub
+                to rebuild their search indexes."""
+
+                failures = tx.TransportFailures()
+                retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
+                header = self.__build_header(uuid=self.__get_uuid(pub))
+
+                for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
+                    single_repository=True, operation="admin", versions=[0]):
+                        try:
+                                d.publish_rebuild_indexes(header=header,
+                                    pub=pub)
+                                return
+                        except tx.ExcessiveTransientFailure, ex:
+                                # If an endpoint experienced so many failures
+                                # that we just gave up, grab the list of
+                                # failures that it contains
+                                failures.extend(ex.failures)
+                        except tx.TransportException, e:
+                                if e.retryable:
+                                        failures.append(e)
+                                else:
+                                        raise
+
+                raise failures
+
+        @LockedTransport()
+        def publish_rebuild_packages(self, pub):
+                """Instructs the repositories named by Publisher pub
+                to rebuild package data."""
+
+                failures = tx.TransportFailures()
+                retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
+                header = self.__build_header(uuid=self.__get_uuid(pub))
+
+                for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
+                    single_repository=True, operation="admin", versions=[0]):
+                        try:
+                                d.publish_rebuild_packages(header=header,
+                                    pub=pub)
+                                return
+                        except tx.ExcessiveTransientFailure, ex:
+                                # If an endpoint experienced so many failures
+                                # that we just gave up, grab the list of
+                                # failures that it contains
+                                failures.extend(ex.failures)
+                        except tx.TransportException, e:
+                                if e.retryable:
+                                        failures.append(e)
+                                else:
+                                        raise
 
                 raise failures
 
         @LockedTransport()
-        def publish_refresh_index(self, pub):
+        def publish_refresh(self, pub):
                 """Instructs the repositories named by Publisher pub
-                to refresh their index."""
+                to refresh package and search data."""
+
+                failures = tx.TransportFailures()
+                retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
+                header = self.__build_header(uuid=self.__get_uuid(pub))
+
+                for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
+                    single_repository=True, operation="admin", versions=[0]):
+                        try:
+                                d.publish_refresh(header=header, pub=pub)
+                                return
+                        except tx.ExcessiveTransientFailure, ex:
+                                # If an endpoint experienced so many failures
+                                # that we just gave up, grab the list of
+                                # failures that it contains
+                                failures.extend(ex.failures)
+                        except tx.TransportException, e:
+                                if e.retryable:
+                                        failures.append(e)
+                                else:
+                                        raise
+
+                raise failures
+
+        @LockedTransport()
+        def publish_refresh_indexes(self, pub):
+                """Instructs the repositories named by Publisher pub
+                to refresh their search indexes."""
 
                 failures = tx.TransportFailures()
                 retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
                 header = self.__build_header(uuid=self.__get_uuid(pub))
 
-                # Call setup if transport isn't configured, or was shutdown.
-                if not self.__engine:
-                        self.__setup()
-
+                # In this case, the operation and versions keywords are
+                # purposefully avoided as the underlying repo function
+                # will automatically determine what operation to use
+                # for the single origin returned by __gen_repo.
                 for d in self.__gen_repo(pub, retry_count, origin_only=True,
                     single_repository=True):
                         try:
-                                d.publish_refresh_index(header=header)
+                                d.publish_refresh_indexes(header=header,
+                                    pub=pub)
+                                return
+                        except tx.ExcessiveTransientFailure, ex:
+                                # If an endpoint experienced so many failures
+                                # that we just gave up, grab the list of
+                                # failures that it contains
+                                failures.extend(ex.failures)
+                        except tx.TransportException, e:
+                                if e.retryable:
+                                        failures.append(e)
+                                else:
+                                        raise
+
+                raise failures
+
+        @LockedTransport()
+        def publish_refresh_packages(self, pub):
+                """Instructs the repositories named by Publisher pub
+                to refresh package data."""
+
+                failures = tx.TransportFailures()
+                retry_count = global_settings.PKG_CLIENT_MAX_TIMEOUT
+                header = self.__build_header(uuid=self.__get_uuid(pub))
+
+                for d, v in self.__gen_repo(pub, retry_count, origin_only=True,
+                    single_repository=True, operation="admin", versions=[0]):
+                        try:
+                                d.publish_refresh_packages(header=header,
+                                    pub=pub)
                                 return
                         except tx.ExcessiveTransientFailure, ex:
                                 # If an endpoint experienced so many failures
@@ -2277,7 +2438,6 @@
                 return originuri in self.__repo_cache
 
 
-
 class MultiXfr(object):
         """A transport object for performing multiple simultaneous
         requests.  This object matches publisher to list of requests, and
--- a/src/modules/depotcontroller.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/depotcontroller.py	Thu Aug 19 23:33:49 2010 -0700
@@ -132,8 +132,14 @@
                 return self.__dir
 
         def get_repo(self, auto_create=False):
-                return sr.Repository(auto_create=auto_create,
-                    cfgpathname=self.__cfg_file, repo_root=self.__dir)
+                if auto_create:
+                        try:
+                                sr.repository_create(self.__dir)
+                        except sr.RepositoryExistsError:
+                                # Already exists, nothing to do.
+                                pass
+                return sr.Repository(cfgpathname=self.__cfg_file,
+                    root=self.__dir, writable_root=self.__writable_root)
 
         def get_repo_url(self):
                 return urlparse.urlunparse(("file", "", urllib.pathname2url(
@@ -469,31 +475,6 @@
 
                 return self.kill()
 
-        def wait_search(self):
-                if self.__writable_root:
-                        idx_tmp_dir = os.path.join(self.__writable_root,
-                            "index", "TMP")
-                else:
-                        idx_tmp_dir = os.path.join(self.__dir, "index", "TMP")
-
-                if not os.path.exists(idx_tmp_dir):
-                        return
-
-                begintime = time.time()
-
-                sleeptime = 0.0
-                check_interval = 0.20
-                ready = False
-                while (time.time() - begintime) <= 10.0:
-                        if not os.path.exists(idx_tmp_dir):
-                                ready = True
-                                break
-                        time.sleep(check_interval)
-
-                if not ready:
-                        raise DepotStateException("Depot search "
-                            "readiness timeout exceeded.")
-
 
 def test_func(testdir):
         dc = DepotController()
--- a/src/modules/indexer.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/indexer.py	Thu Aug 19 23:33:49 2010 -0700
@@ -19,21 +19,25 @@
 #
 # CDDL HEADER END
 #
-# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
-# Use is subject to license terms.
+
+#
+# Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
+#
 
 import errno
+import fcntl
 import os
+import platform
 import shutil
 import urllib
 
-import pkg.version
-
 import pkg.fmri as fmri
 import pkg.manifest as manifest
+import pkg.nrlock
 import pkg.search_storage as ss
 import pkg.search_errors as search_errors
-from pkg.misc import EmptyI, PKG_FILE_BUFSIZ
+import pkg.version
+from pkg.misc import EmptyI, PKG_DIR_MODE, PKG_FILE_BUFSIZ
 
 # Constants for indicating whether pkgplans or fmri-manifest path pairs are
 # used as arguments.
@@ -50,6 +54,25 @@
 
 SORT_FILE_MAX_SIZE = 128 * 1024 * 1024
 
+
+def makedirs(pathname):
+        """Create a directory at the specified location if it does not
+        already exist (including any parent directories).
+        """
+
+        try:
+                os.makedirs(pathname, PKG_DIR_MODE)
+        except EnvironmentError, e:
+                if e.filename == pathname and (e.errno == errno.EEXIST or
+                    os.path.exists(e.filename)):
+                        return
+                elif e.errno in (errno.EACCES, errno.EROFS):
+                        raise search_errors.ProblematicPermissionsIndexException(
+                            e.filename)
+                elif e.errno != errno.EEXIST or e.filename != pathname:
+                        raise
+
+
 class Indexer(object):
         """Indexer is a class designed to index a set of manifests or pkg plans
         and provide a compact representation on disk, which is quickly
@@ -58,7 +81,7 @@
         file_version_string = "VERSION: "
 
         def __init__(self, index_dir, get_manifest_func, get_manifest_path_func,
-            progtrack=None, excludes=EmptyI, log=None, 
+            progtrack=None, excludes=EmptyI, log=None,
             sort_file_max_size=SORT_FILE_MAX_SIZE):
                 self._num_keys = 0
                 self._num_manifests = 0
@@ -66,6 +89,8 @@
                 self.get_manifest_func = get_manifest_func
                 self.get_manifest_path_func = get_manifest_path_func
                 self.excludes = excludes
+                self.__lock = pkg.nrlock.NRLock()
+                self.__lockf = None
                 self.__log = log
                 self.sort_file_max_size = sort_file_max_size
                 # This structure was used to gather all index files into one
@@ -101,7 +126,7 @@
                 self._data_dict["fmri_offsets"] = \
                     ss.InvertedDict(ss.FMRI_OFFSETS_FILE, self._data_manf)
                 self._data_fmri_offsets = self._data_dict["fmri_offsets"]
-                
+
                 self._index_dir = index_dir
                 self._tmp_dir = os.path.join(self._index_dir, "TMP")
 
@@ -216,7 +241,7 @@
 
                 p_id = self._data_manf.get_id_and_add(pfmri)
                 pfmri = p_id
-                
+
                 for tok_tup in new_dict.keys():
                         tok, action_type, subtype, fv = tok_tup
                         lst = [(action_type, [(subtype, [(fv, [(pfmri,
@@ -288,7 +313,7 @@
                                         self._data_fast_add.remove_entity(o_tmp)
                                 else:
                                         self._data_fast_remove.add_entity(o_tmp)
-                        
+
                         if self._progtrack is not None:
                                 self._progtrack.index_add_progress()
                 return
@@ -402,7 +427,7 @@
                 # temporary sort file with that number.
                 fh_dict = dict([
                     (i, open(os.path.join(self._tmp_dir,
-                    SORT_FILE_PREFIX + str(i)), "rb", 
+                    SORT_FILE_PREFIX + str(i)), "rb",
                     buffering=PKG_FILE_BUFSIZ))
                     for i in range(self._sort_file_num)
                 ])
@@ -473,7 +498,7 @@
                         old_min_token = min_token
                         yield min_token, res
                 return
-                
+
         def _update_index(self, dicts, out_dir):
                 """Processes the main dictionary file and writes out a new
                 main dictionary file reflecting the changes in the packages.
@@ -622,7 +647,7 @@
                             d == self._data_token_offset:
                                 continue
                         d.write_dict_file(out_dir, self.file_version_number)
-                        
+
         def _generic_update_index(self, inputs, input_type,
             tmp_index_dir=None, image=None):
                 """Performs all the steps needed to update the indexes.
@@ -641,27 +666,31 @@
                 of packages added since last index rebuild is greater than
                 MAX_ADDED_NUMBER_PACKAGES."""
 
-                # Allow the use of a directory other than the default
-                # directory to store the intermediate results in.
-                if not tmp_index_dir:
-                        tmp_index_dir = self._tmp_dir
-                assert not (tmp_index_dir == self._index_dir)
-
-                # Read the existing dictionaries.
-                self._read_input_indexes(self._index_dir)
-
-                
+                self.lock()
                 try:
-                        # If the tmp_index_dir exists, it suggests a previous
-                        # indexing attempt aborted or that another indexer is
-                        # running. In either case, throw an exception.
-                        try:
-                                os.makedirs(os.path.join(tmp_index_dir))
-                        except OSError, e:
-                                if e.errno == errno.EEXIST:
-                                        raise search_errors.PartialIndexingException(tmp_index_dir)
-                                else:
-                                        raise
+                        # Allow the use of a directory other than the default
+                        # directory to store the intermediate results in.
+                        if not tmp_index_dir:
+                                tmp_index_dir = self._tmp_dir
+                        assert not (tmp_index_dir == self._index_dir)
+
+                        # Read the existing dictionaries.
+                        self._read_input_indexes(self._index_dir)
+                except:
+                        self.unlock()
+                        raise
+
+                try:
+                        # If the temporary indexing directory already exists,
+                        # remove it to ensure it's empty.  Since the caller
+                        # should have locked the index already, this should
+                        # be safe.
+                        if os.path.exists(tmp_index_dir):
+                                shutil.rmtree(tmp_index_dir)
+
+                        # Create directory.
+                        makedirs(os.path.join(tmp_index_dir))
+
                         inputs = list(inputs)
                         fast_update = False
 
@@ -682,10 +711,14 @@
                                                 self._progtrack.index_optimize()
                                         self._data_fast_add.clear()
                                         self._data_fast_remove.clear()
-                                        self.rebuild_index_from_scratch(
+
+                                        # Before passing control to rebuild
+                                        # index, the index lock must be
+                                        # released.
+                                        self.unlock()
+                                        return self.rebuild_index_from_scratch(
                                             image.gen_installed_pkgs(),
                                             tmp_index_dir)
-                                        return
 
                         elif input_type == IDX_INPUT_TYPE_FMRI:
                                 assert not self._sort_fh
@@ -721,8 +754,10 @@
 
                 finally:
                         self._data_main_dict.close_file_handle()
-                
-        def client_update_index(self, pkgplan_list, image, tmp_index_dir = None):
+                        if self.__lock and self.__lock.locked:
+                                self.unlock()
+
+        def client_update_index(self, pkgplan_list, image, tmp_index_dir=None):
                 """This version of update index is designed to work with the
                 client side of things.  Specifically, it expects a pkg plan
                 list with added and removed FMRIs/manifests.  Note: if
@@ -736,7 +771,7 @@
                 self._generic_update_index(pkgplan_list, IDX_INPUT_TYPE_PKG,
                     tmp_index_dir=tmp_index_dir, image=image)
 
-        def server_update_index(self, fmris, tmp_index_dir = None):
+        def server_update_index(self, fmris, tmp_index_dir=None):
                 """ This version of update index is designed to work with the
                 server side of things. Specifically, since we don't currently
                 support removal of a package from a repo, this function simply
@@ -746,8 +781,8 @@
                 specified, it must NOT exist in the current directory structure.
                 This prevents the indexer from accidentally removing files."""
 
-                self._generic_update_index(fmris,
-                    IDX_INPUT_TYPE_FMRI, tmp_index_dir)
+                self._generic_update_index(fmris, IDX_INPUT_TYPE_FMRI,
+                    tmp_index_dir)
 
         def check_index_existence(self):
                 """ Returns a boolean value indicating whether a consistent
@@ -772,7 +807,7 @@
                 return res
 
         def rebuild_index_from_scratch(self, fmris,
-            tmp_index_dir = None):
+            tmp_index_dir=None):
                 """Removes any existing index directory and rebuilds the
                 index based on the fmris and manifests provided as an
                 argument.
@@ -782,14 +817,18 @@
 
                 self.file_version_number = INITIAL_VERSION_NUMBER
                 self.empty_index = True
-                
+
+                self.lock()
                 try:
                         shutil.rmtree(self._index_dir)
-                        os.makedirs(self._index_dir)
+                        makedirs(self._index_dir)
                 except OSError, e:
                         if e.errno == errno.EACCES:
                                 raise search_errors.ProblematicPermissionsIndexException(
                                     self._index_dir)
+                finally:
+                        self.unlock()
+
                 self._generic_update_index(fmris,
                     IDX_INPUT_TYPE_FMRI, tmp_index_dir)
                 self.empty_index = False
@@ -801,9 +840,7 @@
                 absent = False
                 present = False
 
-                if not os.path.exists(self._index_dir):
-                        os.makedirs(self._index_dir)
-                
+                makedirs(self._index_dir)
                 for d in self._data_dict.values():
                         file_path = os.path.join(self._index_dir,
                             d.get_file_name())
@@ -812,14 +849,13 @@
                         else:
                                 absent = True
                         if absent and present:
-                                raise \
-                                    search_errors.InconsistentIndexException( \
+                                raise search_errors.InconsistentIndexException(
                                         self._index_dir)
                 if present:
                         return
                 if self.file_version_number:
-                        raise RuntimeError("Got file_version_number other"
-                                           "than None in setup.")
+                        raise RuntimeError("Got file_version_number other than "
+                            "None in setup.")
                 self.file_version_number = INITIAL_VERSION_NUMBER
                 for d in self._data_dict.values():
                         d.write_dict_file(self._index_dir,
@@ -835,8 +871,8 @@
                 'cat' is the catalog to check for new fmris."""
 
                 fmri_set = set((f.remove_publisher() for f in cat.fmris()))
-                
-                data =  ss.IndexStoreSet("full_fmri_list")
+
+                data = ss.IndexStoreSet("full_fmri_list")
                 try:
                         data.open(index_root)
                 except IOError, e:
@@ -901,3 +937,92 @@
                                     os.path.join(source_dir, "__st_" + st),
                                     os.path.join(dest_dir, "__st_" + st))
                 shutil.rmtree(source_dir)
+
+        def lock(self, blocking=True):
+                """Locks the index in preparation for an index-modifying
+                operation.  Raises an IndexLockedException exception on
+                failure.
+
+                'blocking' is an optional boolean value indicating whether
+                to block until the lock can be obtained or to raise an
+                exception immediately if it cannot be."""
+
+                # First, attempt to obtain a thread lock.
+                if not self.__lock.acquire(blocking=blocking):
+                        raise search_errors.IndexLockedException()
+
+                try:
+                        # Attempt to obtain a file lock.
+                        self.__lockf = self.__lock_process(self._index_dir,
+                            blocking=blocking)
+                except:
+                        self.__lock.release()
+                        raise
+
+        @staticmethod
+        def __lock_process(lock_dir, blocking=True):
+                """Creates a lock file in the specified directory and attempts
+                to obtain a lock on it to prevent modification by other
+                processes."""
+
+                if not os.path.exists(lock_dir):
+                        # Structure doesn't exist yet so a file lock
+                        # cannot be obtained.
+                        return
+
+                # Attempt to obtain a file lock.
+                lfpath = os.path.join(lock_dir, "lock")
+
+                lock_type = fcntl.LOCK_EX
+                if not blocking:
+                        lock_type |= fcntl.LOCK_NB
+
+                # Attempt an initial open of the lock file.
+                try:
+                        lf = open(lfpath, "ab+")
+                except EnvironmentError, e:
+                        if e.errno in (errno.EACCES, errno.EROFS):
+                                raise search_errors.ProblematicPermissionsIndexException(
+                                    e.filename)
+                        raise
+
+                # Attempt to lock the file.
+                try:
+                        fcntl.lockf(lf, lock_type)
+                except IOError, e:
+                        if e.errno not in (errno.EAGAIN, errno.EACCES):
+                                raise
+
+                        # If the lock failed (because it is likely contended),
+                        # then extract the information about the lock acquirer
+                        # and raise an exception.
+                        pid_data = lf.read().strip()
+                        pid, hostname, lock_ts = pid_data.split("\n", 3)
+                        raise search_errors.IndexLockedException(pid=pid,
+                            hostname=hostname)
+
+                # Store lock time as ISO-8601 basic UTC timestamp in lock file.
+                lock_ts = pkg.catalog.now_to_basic_ts()
+
+                # Store information about the lock acquirer and write it.
+                try:
+                        lf.truncate(0)
+                        lf.write("\n".join((str(os.getpid()), platform.node(),
+                            lock_ts, "\n")))
+                        lf.flush()
+                        return lf
+                except:
+                        lf.close()
+                        raise
+
+        def unlock(self):
+                """Unlocks the index."""
+
+                if self.__lockf:
+                        # To avoid race conditions with the next caller waiting
+                        # for the lock file, it is simply truncated instead of
+                        # removed.
+                        self.__lockf.truncate(0)
+                        self.__lockf.close()
+                        self.__lockf = None
+                self.__lock.release()
--- a/src/modules/misc.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/misc.py	Thu Aug 19 23:33:49 2010 -0700
@@ -315,11 +315,14 @@
                             "unit": uom
                         }
 
-def get_rel_path(request, uri):
+def get_rel_path(request, uri, pub=None):
         # Calculate the depth of the current request path relative to our base
         # uri. path_info always ends with a '/' -- so ignore it when
         # calculating depth.
-        depth = request.path_info.count("/") - 1
+        rpath = request.path_info
+        if pub:
+                rpath = rpath.replace("/%s/" % pub, "/")
+        depth = rpath.count("/") - 1
         return ("../" * depth) + uri
 
 def get_pkg_otw_size(action):
@@ -562,7 +565,8 @@
 
                 def values(self):
                         if self.__values is None:
-                                raise AttributeError, "can't iterate over values"
+                                raise AttributeError, "can't iterate over " \
+                                    "values"
                         return self.__values(self.__obj)
 
                 def get(self, key, default=None):
@@ -736,6 +740,53 @@
 
         return default_root
 
+def parse_uri(uri):
+        """Parse the repository location provided and attempt to transform it
+        into a valid repository URI.
+        """
+
+        if uri.find("://") == -1 and not uri.startswith("file:/"):
+                # Convert the file path to a URI.
+                uri = os.path.abspath(uri)
+                uri = urlparse.urlunparse(("file", "",
+                    urllib.pathname2url(uri), "", "", ""))
+
+        scheme, netloc, path, params, query, fragment = \
+            urlparse.urlparse(uri, "file", allow_fragments=0)
+        scheme = scheme.lower()
+
+        if scheme == "file":
+                # During urlunparsing below, ensure that the path starts with
+                # only one '/' character, if any are present.
+                if path.startswith("/"):
+                        path = "/" + path.lstrip("/")
+
+        # Rebuild the URI with the sanitized components.
+        return urlparse.urlunparse((scheme, netloc, path, params,
+            query, fragment))
+
+
+def makedirs(pathname):
+        """Create a directory at the specified location if it does not
+        already exist (including any parent directories) re-raising any
+        unexpected exceptions as ApiExceptions.
+        """
+
+        try:
+                os.makedirs(pathname, PKG_DIR_MODE)
+        except EnvironmentError, e:
+                if e.filename == pathname and (e.errno == errno.EEXIST or
+                    os.path.exists(e.filename)):
+                        return
+                elif e.errno == errno.EACCES:
+                        raise api_errors.PermissionsException(
+                            e.filename)
+                elif e.errno == errno.EROFS:
+                        raise api_errors.ReadOnlyFileSystemException(
+                            e.filename)
+                elif e.errno != errno.EEXIST or e.filename != pathname:
+                        raise
+
 class Singleton(type):
         """Set __metaclass__ to Singleton to create a singleton.
         See http://en.wikipedia.org/wiki/Singleton_pattern """
@@ -746,7 +797,8 @@
  
         def __call__(self, *args, **kw):
                 if self.instance is None:
-                        self.instance = super(Singleton, self).__call__(*args, **kw)
+                        self.instance = super(Singleton, self).__call__(*args,
+                            **kw)
  
                 return self.instance
 
--- a/src/modules/nrlock.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/nrlock.py	Thu Aug 19 23:33:49 2010 -0700
@@ -45,7 +45,20 @@
     def acquire(self, blocking=1):
         if self._is_owned():
             raise NRLockException()
-        return threading._RLock.acquire(self, blocking)
+        rval = threading._RLock.acquire(self, blocking)
+        if rval:
+                self.__locked = True
+        return rval
+
+    def release(self):
+        if self._is_owned():
+                self.__locked = False
+        threading._RLock.release(self)
+
+    @property
+    def locked(self):
+        """A boolean indicating whether the lock is currently locked."""
+        return self.__locked
 
 class NRLockException(Exception):
 
--- a/src/modules/p5i.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/p5i.py	Thu Aug 19 23:33:49 2010 -0700
@@ -104,13 +104,14 @@
                         alias = p.get("alias", None)
                         prefix = p.get("name", None)
                         signing_ca_certs = p.get("signing_ca_certs", [])
-                        inter_certs = p.get("intermediate_certs", [])
+                        intermediate_certs = p.get("intermediate_certs", [])
                         
                         if not prefix:
                                 prefix = "Unknown"
 
                         pub = publisher.Publisher(prefix, alias=alias,
-                            ca_certs=signing_ca_certs, inter_certs=inter_certs)
+                            ca_certs=signing_ca_certs,
+                            intermediate_certs=intermediate_certs)
                         pkglist = p.get("packages", [])
                         result.append((pub, pkglist))
 
@@ -184,7 +185,7 @@
                     "alias": p.alias,
                     "name": p.prefix,
                     "signing_ca_certs": p.signing_ca_certs,
-                    "intermediate_certs": p.inter_certs,
+                    "intermediate_certs": p.intermediate_certs,
                     "packages": [],
                     "repositories": []
                 }
--- a/src/modules/publish/transaction.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/publish/transaction.py	Thu Aug 19 23:33:49 2010 -0700
@@ -23,6 +23,7 @@
 #
 # Copyright (c) 2007, 2010, Oracle and/or its affiliates. All rights reserved.
 #
+
 """Provides a set of publishing interfaces for interacting with a pkg(5)
 repository.  Note that only the Transaction class should be used directly,
 though the other classes can be referred to for documentation purposes."""
@@ -96,7 +97,7 @@
                             "trans_id": self._args.get("trans_id", ""),
                             "status": self._args["status"],
                             "msg": self._args.get("msg", "") }
-                if "trans_id" in self._args:
+                if self._args.get("trans_id", None):
                         return _("'%(op)s' failed for transaction ID "
                             "'%(trans_id)s': %(msg)s") % { "op": self.data,
                             "trans_id": self._args["trans_id"],
@@ -130,8 +131,7 @@
         purposes."""
 
         def __init__(self, origin_url, create_repo=False, pkg_name=None,
-            repo_props=EmptyDict, trans_id=None, refresh_index=True,
-            xport=None, pub=None):
+            repo_props=EmptyDict, trans_id=None, xport=None, pub=None):
                 self.create_repo = create_repo
                 self.origin_url = origin_url
                 self.pkg_name = pkg_name
@@ -160,7 +160,7 @@
                             "be added is not a file.  The path given was %s.") %
                             pth))
 
-        def close(self, abandon=False, refresh_index=True):
+        def close(self, abandon=False, add_to_catalog=True):
                 """Ends an in-flight transaction.  Returns a tuple containing
                 a package fmri (if applicable) and the final state of the
                 related package."""
@@ -190,12 +190,12 @@
                 Returns nothing."""
                 pass
 
+
 class TransportTransaction(object):
         """Provides a publishing interface that uses client transport."""
 
         def __init__(self, origin_url, create_repo=False, pkg_name=None,
-            repo_props=EmptyDict, trans_id=None, refresh_index=True,
-            xport=None, pub=None):
+            repo_props=EmptyDict, trans_id=None, xport=None, pub=None):
 
                 scheme, netloc, path, params, query, fragment = \
                     urlparse.urlparse(origin_url, "http", allow_fragments=0)
@@ -209,23 +209,34 @@
 
                 if scheme == "file":
                         self.create_file_repo(origin_url, repo_props=repo_props,
-                            create_repo=create_repo,
-                            refresh_index=refresh_index)
+                            create_repo=create_repo)
                 elif scheme != "file" and create_repo:
                         raise UnsupportedRepoTypeOperationError("create_repo",
                             type=scheme)
 
-
         def create_file_repo(self, origin_url, repo_props=EmptyDict,
-            create_repo=False, refresh_index=True):
+            create_repo=False):
 
                 if self.transport.publish_cache_contains(self.publisher):
                         return
-        
+
+                if create_repo:
+                        try:
+                                # For compatibility reasons, assume that
+                                # repositories created using pkgsend
+                                # should be in version 3 format (single
+                                # publisher only).
+                                sr.repository_create(self.path, version=3)
+                        except sr.RepositoryExistsError:
+                                # Already exists, nothing to do.
+                                pass
+                        except (apx.ApiException, sr.RepositoryError), _e:
+                                raise TransactionOperationError(None,
+                                    msg=str(_e))
+
                 try:
-                        repo = sr.Repository(auto_create=create_repo,
-                            properties=repo_props, repo_root=self.path, 
-                            refresh_index=refresh_index)
+                        repo = sr.Repository(properties=repo_props,
+                            root=self.path)
                 except EnvironmentError, e:
                         raise TransactionOperationError(None, msg=_(
                             "An error occurred while trying to "
@@ -281,7 +292,7 @@
                         raise TransactionOperationError("add_file",
                             trans_id=self.trans_id, msg=msg)
 
-        def close(self, abandon=False, refresh_index=True, add_to_catalog=True):
+        def close(self, abandon=False, add_to_catalog=True):
                 """Ends an in-flight transaction.  Returns a tuple containing
                 a package fmri (if applicable) and the final state of the
                 related package.
@@ -290,10 +301,6 @@
                 otherwise the server will discard the current transaction and
                 its related data.
 
-                If 'refresh_index' is True, the repository will be instructed
-                to update its search indices after publishing.  Has no effect
-                if 'abandon' is True.
-                
                 'add_to_catalog' tells the depot to add a package to the
                 catalog, if True.
                 """
@@ -307,19 +314,9 @@
                                 raise TransactionOperationError("abandon",
                                     trans_id=self.trans_id, msg=msg)
                 else:
-
-                        # If caller hasn't supplied add_to_catalog, pick an
-                        # appropriate default, based upon the transport.
-                        if add_to_catalog is None:
-                                if self.scheme == "file":
-                                        add_to_catalog = True
-                                else:
-                                        add_to_catalog = False
-                        
                         try:
                                 state, fmri = self.transport.publish_close(
                                     self.publisher, trans_id=self.trans_id,
-                                    refresh_index=refresh_index,
                                     add_to_catalog=add_to_catalog)
                         except apx.TransportError, e:
                                 msg = str(e)
@@ -383,7 +380,7 @@
                 op = "index"
 
                 try:
-                        self.transport.publish_refresh_index(self.publisher)
+                        self.transport.publish_refresh_indexes(self.publisher)
                 except apx.TransportError, e:
                         msg = str(e)
                         raise TransactionOperationError(op,
@@ -422,9 +419,9 @@
             "null": NullTransaction,
         }
 
-        def __new__(cls, origin_url, add_to_catalog=True, create_repo=False,
-            pkg_name=None, repo_props=EmptyDict, trans_id=None,
-            noexecute=False, refresh_index=True, xport=None, pub=None):
+        def __new__(cls, origin_url, create_repo=False, pkg_name=None,
+            repo_props=EmptyDict, trans_id=None, noexecute=False, xport=None,
+            pub=None):
 
                 scheme, netloc, path, params, query, fragment = \
                     urlparse.urlparse(origin_url, "http", allow_fragments=0)
@@ -452,6 +449,8 @@
                         # are present
                         if path.startswith("/"):
                                 path = "/" + path.lstrip("/")
+                        elif not path:
+                                raise TransactionRepositoryURLError(origin_url)
 
                 # Rebuild the url with the sanitized components.
                 origin_url = urlparse.urlunparse((scheme, netloc, path, params,
@@ -459,5 +458,5 @@
 
                 return cls.__schemes[scheme](origin_url,
                     create_repo=create_repo, pkg_name=pkg_name,
-                    refresh_index=refresh_index, repo_props=repo_props,
-                    trans_id=trans_id, xport=xport, pub=pub)
+                    repo_props=repo_props, trans_id=trans_id, xport=xport,
+                    pub=pub)
--- a/src/modules/search_errors.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/search_errors.py	Thu Aug 19 23:33:49 2010 -0700
@@ -52,15 +52,26 @@
                     "rebuild from scratch by clearing out %s " \
                     " and restarting the depot." % self.cause
 
-class PartialIndexingException(IndexingException):
-        """This is used when the directory the temporary files the indexer
-        should write to already exists."""
+
+class IndexLockedException(IndexingException):
+        """This is used when an attempt to modify an index locked by another
+        thread or process is made."""
+
+        def __init__(self, hostname=None, pid=None):
+                IndexingException.__init__(self, None)
+                self.hostname = hostname
+                self.pid = pid
 
         def __str__(self):
-                return "Unable to build or update search indices. Result of " \
-                    "partial indexing found:%s. Please remove this directory "\
-                    "and start a depot with the --refresh-index flag." % \
-                    self.cause
+                if self.pid is not None:
+                        # Used even if hostname is undefined.
+                        return _("The search index cannot be modified as it "
+                            "is currently in use by another process: "
+                            "pid %(pid)s on %(host)s.") % {
+                            "pid": self.pid, "host": self.hostname }
+                return _("The search index cannot be modified as it is "
+                    "currently in use by another process.")
+
 
 class ProblematicPermissionsIndexException(IndexingException):
         """This is used when the indexer is unable to create, move, or remove
@@ -106,45 +117,3 @@
         def __str__(self):
                 return "existing_val was:%s\nincoming_val was:%s" % \
                     (self.ev, self.iv)
-
-class MainDictParsingException(Exception):
-        """This is used when an error occurred while parsing the main search
-        dictionary file."""
-
-        def __init__(self, split_chars, unquote_list, line, file_pos):
-                self.split_chars = split_chars
-                self.unquote_list = unquote_list
-                self.line = line
-                self.file_pos = file_pos
-                
-        def __unicode__(self):
-                # To workaround python issues 6108 and 2517, this provides a
-                # a standard wrapper for this class' exceptions so that they
-                # have a chance of being stringified correctly.
-                return str(self)
-
-        
-class EmptyUnquoteList(MainDictParsingException):
-        """This is used when the function to parse the main dictionary file
-        wasn't given enough values in its unquote_list argument."""
-
-        def __init__(self, split_chars, line):
-                Exception.__init__(self, split_chars, None, line)
-
-        def __str__(self):
-                return _("Got an empty unquote_list while indexing. split_chars"
-                    " was %(sc)s and line was %(l)s" %
-                    { "sc": self.split_chars, "l": self.line })
-
-class EmptyMainDictLine(MainDictParsingException):
-        """This is used when a blank line in the main dictionary file was
-        encountered."""
-
-        def __init__(self, split_chars, unquote_list):
-                Exception.__init__(self, split_chars, unquote_list, None)
-
-        def __str__(self):
-                return _("Had an empty line in the main dictionary. split_chars"
-                    " is %(sc)s and unquote_list is %(ul)s.%(s)s" %
-                    { "sc": self.split_chars, "ul": self.unquote_list, "l": s })
-        
--- a/src/modules/server/api.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/server/api.py	Thu Aug 19 23:33:49 2010 -0700
@@ -40,7 +40,7 @@
 from pkg.api_common import (PackageInfo, LicenseInfo, PackageCategory,
     _get_pkg_cat_data)
 
-CURRENT_API_VERSION = 10
+CURRENT_API_VERSION = 11
 
 class BaseInterface(object):
         """This class represents a base API object that is provided by the
@@ -50,13 +50,16 @@
         needed by interfaces to provide functionality to clients.
         """
 
-        def __init__(self, request, depot):
+        def __init__(self, request, depot, pub):
                 # A protected reference to a pkg.server.depot object.
                 self._depot = depot
 
                 # A protected reference to a cherrypy request object.
                 self._request = request
 
+                # A protected reference to the publisher this interface is for.
+                self._pub = pub
+
 
 class _Interface(object):
         """Private base class used for api interface objects.
@@ -68,6 +71,7 @@
                             version_id)
 
                 self._depot = base._depot
+                self._pub = base._pub
                 self._request = base._request
 
 class CatalogInterface(_Interface):
@@ -84,10 +88,10 @@
                 """A generator function that produces FMRIs as it iterates
                 over the contents of the server's catalog."""
                 try:
-                        c = self._depot.repo.catalog
+                        c = self._depot.repo.get_catalog(self._pub)
                 except srepo.RepositoryMirrorError:
                         return iter(())
-                return self._depot.repo.catalog.fmris()
+                return c.fmris()
 
         def get_entry_all_variants(self, pfmri):
                 """A generator function that yields tuples of the format
@@ -95,10 +99,10 @@
                 variant and variants is a list of the variants for that
                 name."""
                 try:
-                        c = self._depot.repo.catalog
+                        c = self._depot.repo.get_catalog(self._pub)
                 except srepo.RepositoryMirrorError:
                         return iter(((), {}))
-                return self._depot.repo.catalog.get_entry_all_variants(pfmri)
+                return c.get_entry_all_variants(pfmri)
 
         def get_matching_pattern_fmris(self, patterns):
                 """Returns a tuple of a sorted list of PkgFmri objects, newest
@@ -107,7 +111,7 @@
                 match criteria.
                 """
                 try:
-                        c = self._depot.repo.catalog
+                        c = self._depot.repo.get_catalog(self._pub)
                 except srepo.RepositoryMirrorError:
                         return tuple(), {}
                 return pkg.catalog.extract_matching_fmris(c.fmris(),
@@ -128,7 +132,7 @@
                 a single character).
                 """
                 try:
-                        c = self._depot.repo.catalog
+                        c = self._depot.repo.get_catalog(self._pub)
                 except srepo.RepositoryMirrorError:
                         return tuple(), {}
                 return pkg.catalog.extract_matching_fmris(c.fmris(),
@@ -161,8 +165,8 @@
                                 if not pfmri:
                                         notfound.append(pattern)
 
-                repo_cat = self._depot.repo.catalog
-                
+                repo_cat = self._depot.repo.get_catalog(self._pub)
+
                 # Set of options that can use catalog data.
                 cat_opts = frozenset([PackageInfo.SUMMARY,
                     PackageInfo.CATEGORIES, PackageInfo.DESCRIPTION,
@@ -206,10 +210,8 @@
                                 mfst = manifest.Manifest()
                                 mfst.set_fmri(None, f)
                                 try:
-                                        mpath = os.path.join(
-                                            self._depot.repo.manifest_root,
-                                            f.get_dir_path())
-                                except pkg.fmri.FmriError, e:
+                                        mpath = self._depot.repo.manifest(f)
+                                except sr.RepositoryError, e:
                                         notfound.append(f)
                                         continue
 
@@ -218,7 +220,7 @@
                                         continue
 
                                 mfst.set_content(file(mpath).read())
-                                
+
                                 if PackageInfo.LICENSES in info_needed:
                                         licenses = self.__licenses(mfst)
 
@@ -264,7 +266,7 @@
                 available.
                 """
                 try:
-                        c = self._depot.repo.catalog
+                        c = self._depot.repo.get_catalog(self._pub)
                 except srepo.RepositoryMirrorError:
                         return None
                 return c.last_modified
@@ -275,7 +277,7 @@
                 if the catalog is not available.
                 """
                 try:
-                        c = self._depot.repo.catalog
+                        c = self._depot.repo.get_catalog(self._pub)
                 except srepo.RepositoryMirrorError:
                         return None
                 return c.package_count
@@ -286,7 +288,7 @@
                 None if the catalog is not available.
                 """
                 try:
-                        c = self._depot.repo.catalog
+                        c = self._depot.repo.get_catalog(self._pub)
                 except srepo.RepositoryMirrorError:
                         return None
                 return c.package_version_count
@@ -348,8 +350,7 @@
                                         pfmri = result[2][0]
 
                                 if mver is not None:
-                                        if mver != version.Version(pfmri.split(
-                                            "@", 1)[1], None):
+                                        if mver != pfmri.version:
                                                 continue
 
                                 if return_latest and \
@@ -404,7 +405,8 @@
                         # can be immediately raised.
                         query = qp.Query(" ".join(tokens), case_sensitive,
                             return_type, None, None)
-                        res_list = self._depot.repo.search([str(query)])
+                        res_list = self._depot.repo.search([str(query)],
+                            pub=self._pub)
                         if not res_list:
                                 return
 
@@ -412,7 +414,8 @@
 
                 query = qp.Query(" ".join(tokens), case_sensitive,
                     return_type, num_to_return, start_point)
-                res_list = self._depot.repo.search([str(query)])
+                res_list = self._depot.repo.search([str(query)],
+                    pub=self._pub)
                 if not res_list:
                         return
                 return res_list[0]
@@ -422,15 +425,19 @@
                 """Returns a Boolean value indicating whether search
                 functionality is available for the catalog.
                 """
-                return self._depot.repo.search_available
-        
+                try:
+                        rstore = self._depot.repo.get_pub_rstore(self._pub)
+                except srepo.RepositoryUnknownPublisher:
+                        return False
+                return rstore.search_available
+
         def __licenses(self, mfst):
                 """Private function. Returns the license info from the
                 manifest mfst."""
                 license_lst = []
                 for lic in mfst.gen_actions_by_type("license"):
                         s = StringIO.StringIO()
-                        lpath = self._depot.repo.cache_store.lookup(lic.hash)
+                        lpath = self._depot.repo.file(lic.hash, pub=self._pub)
                         lfile = file(lpath, "rb")
                         misc.gunzip_from_stream(lfile, s)
                         text = s.getvalue()
@@ -439,6 +446,22 @@
                             text=text))
                 return license_lst
 
+        @property
+        def version(self):
+                """Returns the version of the catalog or None if no catalog
+                is available.
+                """
+
+                try:
+                        c = self._depot.repo.get_catalog(self._pub)
+                except srepo.RepositoryMirrorError:
+                        return None
+                if hasattr(c, "version"):
+                        return c.version
+                # Assume version 0.
+                return 0
+
+
 class ConfigInterface(_Interface):
         """This class presents a read-only interface to configuration
         information and statistics about the depot that clients may use.
@@ -470,14 +493,14 @@
                 """The number of /filelist operation requests that have occurred
                 during the current server session.
                 """
-                return self._depot.repo.flist_requests
+                return self._depot.flist_requests
 
         @property
         def filelist_file_requests(self):
                 """The number of files served by /filelist operations requested
                 during the current server session.
                 """
-                return self._depot.repo.flist_files
+                return self._depot.flist_file_requests
 
         @property
         def in_flight_transactions(self):
@@ -507,13 +530,6 @@
                 return self._depot.repo.read_only
 
         @property
-        def rename_requests(self):
-                """The number of /rename operation requests that have occurred
-                during the current server session.
-                """
-                return self._depot.repo.pkgs_renamed
-
-        @property
         def web_root(self):
                 """The file system path where the server's web content is
                 located.
@@ -546,67 +562,14 @@
 
                 Section     Property            Description
                 ==========  ==========          ===============
-                publisher   alias               An alternative name for the
-                                                publisher of the packages in
-                                                the repository.
-
-                            prefix              The name of the publisher of
-                                                the packages in the repository.
-
-                repository  collection_type     A constant value indicating the
-                                                type of packages in the
-                                                repository.  See the pydoc for
-                                                pkg.client.publisher.Repository
-                                                for details.
-
-                            description         A string value containing a
-                                                descriptive paragraph for the
-                                                repository.
-
-                            detailed_url        A comma-separated list of URIs
-                                                where more information about the
-                                                repository can be found.
-
-                            legal_uris          A comma-separated list of URIs
-                                                where licensing, legal, and
-                                                terms of service information
-                                                for the repository can be found.
+                publisher   prefix              The name of the default
+                                                publisher to use for packaging
+                                                operations if one is not
+                                                provided.
 
-                            maintainer          A human readable string
-                                                describing the entity
-                                                maintaining the repository.  For
-                                                an individual, this string is
-                                                expected to be their name or
-                                                name and email.
-
-                            maintainer_url      A URI associated with the entity
-                                                maintaining the repository.
-
-                            mirrors             A comma-separated list of URIs
-                                                where package content can be
-                                                retrieved.
-
-                            name                A short, descriptive name for
-                                                the repository.
-
-                            origins             A comma-separated list of URIs
-                                                where package metadata can be
-                                                retrieved.
-
-                            refresh_seconds     An integer value indicating the
-                                                number of seconds clients should
-                                                wait before refreshing cached
-                                                repository catalog or repository
-                                                metadata information.
-
-                            registration_uri    A URI indicating a location
-                                                clients can use to register or
-                                                obtain credentials needed to
-                                                access the repository.
-
-                            related_uris        A comma-separated list of URIs
-                                                of related repositories that a
-                                                client may be interested in.
+                repository  version             An integer value representing
+                                                the version of the repository's
+                                                format.
                 """
                 rval = {}
                 for sname, props in self._depot.repo.cfg.get_index().iteritems():
@@ -639,7 +602,7 @@
         def get_rel_path(self, uri):
                 """Returns uri relative to the current request path.
                 """
-                return pkg.misc.get_rel_path(self._request, uri)
+                return pkg.misc.get_rel_path(self._request, uri, pub=self._pub)
 
         def log(self, msg):
                 """Instruct the server to log the provided message to its error
@@ -662,6 +625,16 @@
                 return self._request.path_info
 
         @property
+        def publisher(self):
+                """The Publisher object for the package data related to this
+                request or None if not available.
+                """
+                try:
+                        return self._depot.repo.get_publisher(self._pub)
+                except srepo.RepositoryUnknownPublisher:
+                        return None
+
+        @property
         def query_string(self):
                 """A string containing the "query_string" portion of the
                 requested URL.
@@ -695,4 +668,3 @@
                 """
                 return cherrypy.url(path=path, qs=qs, script_name=script_name,
                     relative=relative)
-
--- a/src/modules/server/catalog.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/server/catalog.py	Thu Aug 19 23:33:49 2010 -0700
@@ -483,6 +483,12 @@
 
                 return self.attrs.get("origin", None)
 
+        @property
+        def package_count(self):
+                """Returns the number of packages in the catalog."""
+
+                return self.attrs["npkgs"] or 0
+
         @classmethod
         def recv(cls, filep, path, pub=None):
                 """A static method that takes a file-like object and
--- a/src/modules/server/depot.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/server/depot.py	Thu Aug 19 23:33:49 2010 -0700
@@ -36,6 +36,7 @@
 else:
         import select
 
+import atexit
 import cStringIO
 import errno
 import httplib
@@ -44,8 +45,12 @@
 import os
 import random
 import re
+import shutil
+import simplejson as json
 import socket
 import tarfile
+import tempfile
+import threading
 import time
 import urlparse
 # Without the below statements, tarfile will trigger calls to getpwuid and
@@ -57,20 +62,22 @@
 tarfile.grp = None
 
 import urllib
+import Queue
 
 import pkg
 import pkg.actions as actions
 import pkg.catalog as catalog
-import pkg.client.publisher as publisher
 import pkg.config as cfg
 import pkg.fmri as fmri
 import pkg.indexer as indexer
 import pkg.manifest as manifest
 import pkg.misc as misc
+import pkg.nrlock
 import pkg.p5i as p5i
+import pkg.server.catalog as old_catalog
 import pkg.server.face as face
-import pkg.server.repository as repo
-import pkg.version as version
+import pkg.server.repository as srepo
+import pkg.version
 
 from pkg.server.query_parser import Query, ParseError, BooleanQueryException
 
@@ -151,7 +158,9 @@
             "add",
             "p5i",
             "publisher",
-            "index"
+            "index",
+            "status",
+            "admin",
         ]
 
         REPO_OPS_READONLY = [
@@ -163,14 +172,16 @@
             "filelist",
             "file",
             "p5i",
-            "publisher"
+            "publisher",
+            "status",
         ]
 
         REPO_OPS_MIRROR = [
             "versions",
             "filelist",
             "file",
-            "publisher"
+            "publisher",
+            "status",
         ]
 
         content_root = None
@@ -181,8 +192,14 @@
                 doing so, ensure that the operations have been explicitly
                 "exposed" for external usage."""
 
+                # This lock is used to protect the depot from multiple
+                # threads modifying data structures at the same time.
+                self.__lock = pkg.nrlock.NRLock()
+
                 self.cfg = dconf
                 self.repo = repo
+                self.flist_requests = 0
+                self.flist_file_requests = 0
 
                 content_root = dconf.get_property("pkg", "content_root")
                 pkg_root = dconf.get_property("pkg", "pkg_root")
@@ -196,13 +213,25 @@
                         self.content_root = None
                         self.web_root = None
 
+                # Ensure a temporary storage area exists for depot components
+                # to use during operations.
+                tmp_root = dconf.get_property("pkg", "writable_root")
+                if not tmp_root:
+                        # If no writable root, create a temporary area.
+                        tmp_root = tempfile.mkdtemp()
+
+                        # Try to ensure temporary area is cleaned up on exit.
+                        atexit.register(shutil.rmtree, tmp_root,
+                            ignore_errors=True)
+                self.tmp_root = tmp_root
+
                 # Handles the BUI (Browser User Interface).
                 face.init(self)
 
                 # Store any possible configuration changes.
                 repo.write_config()
 
-                if repo.mirror or not repo.repo_root:
+                if repo.mirror or not repo.root:
                         self.ops_list = self.REPO_OPS_MIRROR[:]
                         if not repo.cfg.get_property("publisher", "prefix"):
                                 self.ops_list.remove("publisher")
@@ -239,11 +268,14 @@
                         if op in disable_ops and (ver in disable_ops[op] or
                             "*" in disable_ops[op]):
                                 continue
+                        if not repo.supports(op, int(ver)):
+                                # Unsupported operation.
+                                continue
 
                         func.__dict__["exposed"] = True
 
                         if op in self.vops:
-                                self.vops[op].append(ver)
+                                self.vops[op].append(int(ver))
                         else:
                                 # We need a Dummy object here since we need to
                                 # be able to set arbitrary attributes that
@@ -252,8 +284,17 @@
                                 # dispatch tree mapping mechanism.  We can't
                                 # use other object types here since Python
                                 # won't let us set arbitary attributes on them.
-                                setattr(self, op, Dummy())
-                                self.vops[op] = [ver]
+                                opattr = Dummy()
+                                setattr(self, op, opattr)
+                                self.vops[op] = [int(ver)]
+
+                                for pub in self.repo.publishers:
+                                        pub = pub.replace(".", "_")
+                                        pubattr = getattr(self, pub, None)
+                                        if not pubattr:
+                                                pubattr = Dummy()
+                                                setattr(self, pub, pubattr)
+                                        setattr(pubattr, op, opattr)
 
                         opattr = getattr(self, op)
                         setattr(opattr, ver, func)
@@ -262,6 +303,33 @@
                         # This handles SIGUSR1
                         cherrypy.engine.subscribe("graceful", self.refresh)
 
+                # Setup background task execution handler.
+                self.__bgtask = BackgroundTaskPlugin(cherrypy.engine)
+                self.__bgtask.subscribe()
+
+        def _get_req_pub(self):
+                """Private helper function to retrieve the publisher prefix
+                for the current operation from the request path.  Returns None
+                if a publisher prefix was not found in the request path.  The
+                publisher is assumed to be the first component of the path_info
+                string if it doesn't match the operation's name.  This does mean
+                that a publisher can't be named the same as an operation, but
+                that isn't viewed as an unreasonable limitation.
+                """
+
+                try:
+                        req_pub = cherrypy.request.path_info.strip("/").split(
+                            "/")[0]
+                except IndexError:
+                        return None
+
+                if req_pub not in self.REPO_OPS_DEFAULT and req_pub != "feed":
+                        # Assume that if the first component of the request path
+                        # doesn't match a known operation that it's a publisher
+                        # prefix.
+                        return req_pub
+                return None
+
         def __set_response_expires(self, op_name, expires, max_age=None):
                 """Used to set expiration headers on a response dynamically
                 based on the name of the operation.
@@ -279,8 +347,25 @@
                 parameter is equal to the repository's refresh_seconds
                 property."""
 
-                rs = self.repo.cfg.get_property("repository",
-                    "refresh_seconds")
+                prefix = self._get_req_pub()
+                if not prefix:
+                        prefix = self.repo.cfg.get_property("publisher",
+                            "prefix")
+
+                rs = None
+                if prefix:
+                        try:
+                                pub = self.repo.get_publisher(prefix)
+                        except Exception, e:
+                                # Couldn't get pub.
+                                pass
+                        else:
+                                repo = pub.selected_repository
+                                if repo:
+                                        rs = repo.refresh_seconds
+                if rs is None:
+                        rs = 14400
+
                 if max_age is None:
                         max_age = min((rs, expires))
 
@@ -310,15 +395,36 @@
                 # Handles the BUI (Browser User Interface).
                 face.init(self)
 
+        def __map_pub_ops(self, pub_prefix):
+                # Map the publisher into the depot's operation namespace if
+                # needed.
+                self.__lock.acquire() # Prevent race conditions.
+                try:
+                        pubattr = getattr(self, pub_prefix, None)
+                        if not pubattr:
+                                # Might have already been done in
+                                # another thread.
+                                pubattr = Dummy()
+                                setattr(self, pub_prefix, pubattr)
+
+                                for op in self.vops:
+                                        opattr = getattr(self, op)
+                                        setattr(pubattr, op, opattr)
+                finally:
+                        self.__lock.release()
+
         @cherrypy.expose
         def default(self, *tokens, **params):
                 """Any request that is not explicitly mapped to the repository
                 object will be handled by the "externally facing" server
                 code instead."""
 
+                pub = self._get_req_pub()
                 op = None
-                if len(tokens) > 0:
+                if not pub and tokens:
                         op = tokens[0]
+                elif pub and len(tokens) > 1:
+                        op = tokens[1]
 
                 if op in self.REPO_OPS_DEFAULT and op not in self.vops:
                         raise cherrypy.HTTPError(httplib.NOT_FOUND,
@@ -326,12 +432,17 @@
                 elif op not in self.vops:
                         request = cherrypy.request
                         response = cherrypy.response
-                        return face.respond(self, request, response)
+                        if not misc.valid_pub_prefix(pub):
+                                pub = None
+                        return face.respond(self, request, response, pub)
 
                 # If we get here, we know that 'operation' is supported.
                 # Ensure that we have a integer protocol version.
                 try:
-                        ver = int(tokens[1])
+                        if not pub:
+                                ver = int(tokens[1])
+                        else:
+                                ver = int(tokens[2])
                 except IndexError:
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST,
                             "Missing version\n")
@@ -339,7 +450,32 @@
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST,
                             "Non-integer version\n")
 
-                # Assume 'version' is not supported for the operation.
+                if ver not in self.vops[op]:
+                        # 'version' is not supported for the operation.
+                        raise cherrypy.HTTPError(httplib.NOT_FOUND,
+                            "Version '%s' not supported for operation '%s'\n" %
+                            (ver, op))
+                elif op == "open" and pub not in self.repo.publishers:
+                        if not misc.valid_pub_prefix(pub):
+                                raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+                                    "Invalid publisher prefix: %s\n" % pub)
+
+                        # Map operations for new publisher.
+                        self.__map_pub_ops(pub)
+
+                        # Finally, perform an internal redirect so that cherrypy
+                        # will correctly redispatch to the newly mapped
+                        # operations.
+                        rel_uri = cherrypy.request.path_info
+                        if cherrypy.request.query_string:
+                                rel_uri += "?%s" % cherrypy.request.query_string
+                        raise cherrypy.InternalRedirect(rel_uri)
+                elif pub:
+                        raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+                            "Unknown publisher: %s\n" % pub)
+
+                # Assume 'version' is not supported for the operation for some
+                # other reason.
                 raise cherrypy.HTTPError(httplib.NOT_FOUND, "Version '%s' not "
                     "supported for operation '%s'\n" % (ver, op))
 
@@ -352,7 +488,7 @@
                 self.__set_response_expires("versions", 5*60, 5*60)
                 versions = "pkg-server %s\n" % pkg.VERSION
                 versions += "\n".join(
-                    "%s %s" % (op, " ".join(vers))
+                    "%s %s" % (op, " ".join(str(v) for v in vers))
                     for op, vers in self.vops.iteritems()
                 ) + "\n"
                 return versions
@@ -375,11 +511,12 @@
                     start_point=None))]
 
                 try:
-                        res_list = self.repo.search(query_args_lst)
-                except repo.RepositorySearchUnavailableError, e:
+                        res_list = self.repo.search(query_args_lst,
+                            pub=self._get_req_pub())
+                except srepo.RepositorySearchUnavailableError, e:
                         raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
                             str(e))
-                except repo.RepositoryError, e:
+                except srepo.RepositoryError, e:
                         # Treat any remaining repository error as a 404, but
                         # log the error and include the real failure
                         # information.
@@ -428,18 +565,15 @@
                 if not query_str_lst:
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST)
 
-                if not self.repo.search_available:
-                        raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
-                            "Search temporarily unavailable")
-
                 try:
-                        res_list = self.repo.search(query_str_lst)
+                        res_list = self.repo.search(query_str_lst,
+                            pub=self._get_req_pub())
                 except (ParseError, BooleanQueryException), e:
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
-                except repo.RepositorySearchUnavailableError, e:
+                except srepo.RepositorySearchUnavailableError, e:
                         raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
                             str(e))
-                except repo.RepositoryError, e:
+                except srepo.RepositoryError, e:
                         # Treat any remaining repository error as a 404, but
                         # log the error and include the real failure
                         # information.
@@ -495,18 +629,30 @@
 
                 # Response headers have to be setup *outside* of the function
                 # that yields the catalog content.
-                c = self.repo.catalog
+                try:
+                        cat = self.repo.get_catalog(pub=self._get_req_pub())
+                except srepo.RepositoryError, e:
+                        cherrypy.log("Request failed: %s" % str(e))
+                        raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+
                 response = cherrypy.response
                 response.headers["Content-type"] = "text/plain; charset=utf-8"
-                response.headers["Last-Modified"] = c.last_modified.isoformat()
+                if hasattr(cat, "version"):
+                        response.headers["Last-Modified"] = \
+                            cat.last_modified.isoformat()
+                else:
+                        response.headers["Last-Modified"] = \
+                            old_catalog.ts_to_datetime(
+                            cat.last_modified()).isoformat()
                 response.headers["X-Catalog-Type"] = "full"
                 self.__set_response_expires("catalog", 86400, 86400)
 
                 def output():
                         try:
-                                for l in self.repo.catalog_0():
+                                for l in self.repo.catalog_0(
+                                    pub=self._get_req_pub()):
                                         yield l
-                        except repo.RepositoryError, e:
+                        except srepo.RepositoryError, e:
                                 # Can't do anything in a streaming generator
                                 # except log the error and return.
                                 cherrypy.log("Request failed: %s" % str(e))
@@ -527,8 +673,9 @@
                             _("Directory listing not allowed."))
 
                 try:
-                        fpath = self.repo.catalog_1(name)
-                except repo.RepositoryError, e:
+                        fpath = self.repo.catalog_1(name,
+                            pub=self._get_req_pub())
+                except srepo.RepositoryError, e:
                         # Treat any remaining repository error as a 404, but
                         # log the error and include the real failure
                         # information.
@@ -547,10 +694,9 @@
                 of the catalog and its image policies."""
 
                 try:
-                        cat = self.repo.catalog
-                        pubs = cat.publishers()
+                        pubs = self.repo.publishers
                 except Exception, e:
-                        cherrypy.log("Request Failed: %s" % e)
+                        cherrypy.log("Request failed: %s" % e)
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
 
                 # A broken proxy (or client) has caused a fully-qualified FMRI
@@ -560,7 +706,7 @@
                         raise cherrypy.HTTPError(httplib.FORBIDDEN,
                             _("Directory listing not allowed."))
 
-                if comps[0] == "pkg:" and comps[1] in pubs:
+                if len(comps) > 1 and comps[0] == "pkg:" and comps[1] in pubs:
                         # Only one slash here as another will be added below.
                         comps[0] += "/"
 
@@ -571,10 +717,12 @@
                         # of the fmri and have been split out because of bad
                         # proxy behaviour.
                         pfmri = "/".join(comps)
-                        fpath = self.repo.manifest(pfmri)
-                except (IndexError, repo.RepositoryInvalidFMRIError), e:
+                        pfmri = fmri.PkgFmri(pfmri, None)
+                        fpath = self.repo.manifest(pfmri,
+                            pub=self._get_req_pub())
+                except (IndexError, fmri.FmriError), e:
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
-                except repo.RepositoryError, e:
+                except srepo.RepositoryError, e:
                         # Treat any remaining repository error as a 404, but
                         # log the error and include the real failure
                         # information.
@@ -619,7 +767,7 @@
                 is output directly to the client. """
 
                 try:
-                        self.repo.inc_flist()
+                        self.flist_requests += 1
 
                         # Create a dummy file object that hooks to the write()
                         # callable which is all tarfile needs to output the
@@ -642,17 +790,18 @@
                         # closed properly regardless of which thread is
                         # executing.
                         cherrypy.request.hooks.attach("on_end_request",
-                            self._tar_stream_close, failsafe = True)
+                            self._tar_stream_close, failsafe=True)
 
+                        pub = self._get_req_pub()
                         for v in params.values():
-                                filepath = self.repo.cache_store.lookup(v)
-
-                                # If file isn't here, skip it
-                                if not filepath:
+                                try:
+                                        filepath = self.repo.file(v, pub=pub)
+                                except srepo.RepositoryFileNotFoundError:
+                                        # If file isn't here, skip it
                                         continue
 
                                 tar_stream.add(filepath, v, False)
-                                self.repo.inc_flist_files()
+                                self.flist_file_requests += 1
 
                         # Flush the remaining bytes to the client.
                         tar_stream.close()
@@ -694,10 +843,10 @@
                         fhash = None
 
                 try:
-                        fpath = self.repo.file(fhash)
-                except repo.RepositoryFileNotFoundError, e:
+                        fpath = self.repo.file(fhash, pub=self._get_req_pub())
+                except srepo.RepositoryFileNotFoundError, e:
                         raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
-                except repo.RepositoryError, e:
+                except srepo.RepositoryError, e:
                         # Treat any remaining repository error as a 404, but
                         # log the error and include the real failure
                         # information.
@@ -749,18 +898,28 @@
 
                 # XXX Authentication will be handled by virtue of possessing a
                 # signed certificate (or a more elaborate system).
+                if not pfmri:
+                        raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+                            _("A valid package FMRI must be specified."))
 
                 try:
+                        pfmri = fmri.PkgFmri(pfmri, client_release)
                         trans_id = self.repo.open(client_release, pfmri)
-                        response.headers["Content-type"] = "text/plain; charset=utf-8"
-                        response.headers["Transaction-ID"] = trans_id
-                except repo.RepositoryError, e:
+                except (fmri.FmriError, srepo.RepositoryError), e:
                         # Assume a bad request was made.  A 404 can't be
                         # returned here as misc.versioned_urlopen will interpret
                         # that to mean that the server doesn't support this
                         # operation.
+                        cherrypy.log("Request failed: %s" % e)
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
 
+                if pfmri.publisher and not self._get_req_pub():
+                        self.__map_pub_ops(pfmri.publisher)
+
+                # Set response headers before returning.
+                response.headers["Content-type"] = "text/plain; charset=utf-8"
+                response.headers["Transaction-ID"] = trans_id
+
         @cherrypy.tools.response_headers(headers=[("Pragma", "no-cache"),
             ("Cache-Control", "no-cache, no-transform, must-revalidate"),
             ("Expires", 0)])
@@ -779,19 +938,28 @@
 
                 # XXX Authentication will be handled by virtue of possessing a
                 # signed certificate (or a more elaborate system).
+                if not pfmri:
+                        raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+                            _("A valid package FMRI must be specified."))
 
                 try:
+                        pfmri = fmri.PkgFmri(pfmri, client_release)
                         trans_id = self.repo.append(client_release, pfmri)
-                        response.headers["Content-type"] = \
-                            "text/plain; charset=utf-8"
-                        response.headers["Transaction-ID"] = trans_id
-                except repo.RepositoryError, e:
+                except (fmri.FmriError, srepo.RepositoryError), e:
                         # Assume a bad request was made.  A 404 can't be
                         # returned here as misc.versioned_urlopen will interpret
                         # that to mean that the server doesn't support this
                         # operation.
+                        cherrypy.log("Request failed: %s" % e)
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
 
+                if pfmri.publisher and not self._get_req_pub():
+                        self.__map_pub_ops(pfmri.publisher)
+
+                # Set response headers before returning.
+                response.headers["Content-type"] = "text/plain; charset=utf-8"
+                response.headers["Transaction-ID"] = trans_id
+
         @cherrypy.tools.response_headers(headers=[("Pragma", "no-cache"),
             ("Cache-Control", "no-cache, no-transform, must-revalidate"),
             ("Expires", 0)])
@@ -813,20 +981,6 @@
 
                 try:
                         # Assume "True" for backwards compatibility.
-                        refresh_index = int(request.headers.get(
-                            "X-IPkg-Refresh-Index", 1))
-
-                        # Force a boolean value.
-                        if refresh_index:
-                                refresh_index = True
-                        else:
-                                refresh_index = False
-                except ValueError, e:
-                        raise cherrypy.HTTPError(httplib.BAD_REQUEST,
-                            "X-IPkg-Refresh-Index: %s" % e)
-
-                try:
-                        # Assume "True" for backwards compatibility.
                         add_to_catalog = int(request.headers.get(
                             "X-IPkg-Add-To-Catalog", 1))
 
@@ -841,9 +995,8 @@
 
                 try:
                         pfmri, pstate = self.repo.close(trans_id,
-                            refresh_index=refresh_index,
                             add_to_catalog=add_to_catalog)
-                except repo.RepositoryError, e:
+                except srepo.RepositoryError, e:
                         # Assume a bad request was made.  A 404 can't be
                         # returned here as misc.versioned_urlopen will interpret
                         # that to mean that the server doesn't support this
@@ -857,6 +1010,79 @@
         @cherrypy.tools.response_headers(headers=[("Pragma", "no-cache"),
             ("Cache-Control", "no-cache, no-transform, must-revalidate"),
             ("Expires", 0)])
+        def admin_0(self, *tokens, **params):
+                """Execute a specified repository administration operation based
+                on the provided query string.  Example:
+
+                        <repo_uri>[/<publisher>]/admin/0?cmd=refresh-indexes
+
+                Available commands are:
+                        rebuild
+                            Discard search data and package catalogs and
+                            rebuild both.
+                        rebuild-indexes
+                            Discard search data and rebuild.
+                        rebuild-packages
+                            Discard package catalogs and rebuild.
+                        refresh
+                            Update search and package data.
+                        refresh-indexes
+                            Update search data.  (Add packages found in the
+                            repository to their related search indexes.)
+                        refresh-packages
+                            Update package data.  (Add packages found in the
+                            repository to their related catalog.)
+                """
+
+                cmd = params.get("cmd", "")
+
+                # These commands cause the operation requested to be queued
+                # for later execution.  This does mean that if the operation
+                # fails, the client won't know about it, but this is necessary
+                # since these are long running operations (are likely to exceed
+                # connection timeout limits).
+                try:
+                        if cmd == "rebuild":
+                                # Discard existing catalog and search data and
+                                # rebuild.
+                                self.__bgtask.put(self.repo.rebuild,
+                                    pub=self._get_req_pub(), build_catalog=True,
+                                    build_index=True)
+                        elif cmd == "rebuild-indexes":
+                                # Discard search data and rebuild.
+                                self.__bgtask.put(self.repo.rebuild,
+                                    pub=self._get_req_pub(),
+                                    build_catalog=False, build_index=True)
+                        elif cmd == "rebuild-packages":
+                                # Discard package data and rebuild.
+                                self.__bgtask.put(self.repo.rebuild,
+                                    pub=self._get_req_pub(), build_catalog=True,
+                                    build_index=False)
+                        elif cmd == "refresh":
+                                # Add new packages and update search indexes.
+                                self.__bgtask.put(self.repo.add_content,
+                                    pub=self._get_req_pub(), refresh_index=True)
+                        elif cmd == "refresh-indexes":
+                                # Update search indexes.
+                                self.__bgtask.put(self.repo.refresh_index,
+                                    pub=self._get_req_pub())
+                        elif cmd == "refresh-packages":
+                                # Add new packages.
+                                self.__bgtask.put(self.repo.add_content,
+                                    pub=self._get_req_pub(),
+                                    refresh_index=False)
+                        else:
+                                raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+                                   "Unknown or unsupported operation: '%s'" %
+                                   cmd)
+                except Queue.Full:
+                        raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
+                           "Another operation is already in progress; try "
+                           "again later.")
+
+        @cherrypy.tools.response_headers(headers=[("Pragma", "no-cache"),
+            ("Cache-Control", "no-cache, no-transform, must-revalidate"),
+            ("Expires", 0)])
         def abandon_0(self, *tokens):
                 """Aborts an in-flight transaction for the Transaction ID
                 specified in the request path.  Returns no output."""
@@ -869,7 +1095,7 @@
 
                 try:
                         self.repo.abandon(trans_id)
-                except repo.RepositoryError, e:
+                except srepo.RepositoryError, e:
                         # Assume a bad request was made.  A 404 can't be
                         # returned here as misc.versioned_urlopen will interpret
                         # that to mean that the server doesn't support this
@@ -921,6 +1147,7 @@
                 try:
                         action = actions.types[entry_type](data, **attrs)
                 except actions.ActionError, e:
+                        cherrypy.log("Request failed: %s" % str(e))
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
 
                 # XXX Once actions are labelled with critical nature.
@@ -929,11 +1156,12 @@
 
                 try:
                         self.repo.add(trans_id, action)
-                except repo.RepositoryError, e:
+                except srepo.RepositoryError, e:
                         # Assume a bad request was made.  A 404 can't be
                         # returned here as misc.versioned_urlopen will interpret
                         # that to mean that the server doesn't support this
                         # operation.
+                        cherrypy.log("Request failed: %s" % str(e))
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
 
         # We need to prevent cherrypy from processing the request body so that
@@ -987,23 +1215,35 @@
             ("Cache-Control", "no-cache, no-transform, must-revalidate"),
             ("Expires", 0)])
         def index_0(self, *tokens):
-                """Triggers a refresh of the search indices.
-                Returns no output."""
+                """Provides an administrative interface for search indexing.
+                Returns no output if successful; otherwise the response body
+                will contain the failure details.
+                """
 
-                cmd = tokens[0]
+                try:
+                        cmd = tokens[0]
+                except IndexError:
+                        cmd = ""
 
-                if cmd == "refresh":
-                        try:
-                                self.repo.refresh_index()
-                        except repo.RepositoryError, e:
-                                # Assume a bad request was made.  A 404 can't be
-                                # returned here as misc.versioned_urlopen will interpret
-                                # that to mean that the server doesn't support this
-                                # operation.
-                                raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
-                else:
-                        cherrypy.log("Unknown index subcommand: %s" % cmd)
-                        raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+                # These commands cause the operation requested to be queued
+                # for later execution.  This does mean that if the operation
+                # fails, the client won't know about it, but this is necessary
+                # since these are long running operations (are likely to exceed
+                # connection timeout limits).
+                try:
+                        if cmd == "refresh":
+                                # Update search indexes.
+                                self.__bgtask.put(self.repo.refresh_index,
+                                    pub=self._get_req_pub())
+                        else:
+                                cherrypy.log("Unknown index subcommand: %s" %
+                                    cmd)
+                                raise cherrypy.HTTPError(httplib.NOT_FOUND,
+                                    str(e))
+                except Queue.Full:
+                        raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
+                           "Another operation is already in progress; try "
+                           "again later.")
 
         @cherrypy.tools.response_headers(headers=\
             [("Content-Type", "text/plain; charset=utf-8")])
@@ -1016,16 +1256,15 @@
                     policies. """
 
                 try:
-                        cat = self.repo.catalog
-                        pubs = cat.publishers()
+                        pubs = self.repo.publishers
                 except Exception, e:
-                        cherrypy.log("Request Failed: %s" % e)
+                        cherrypy.log("Request failed: %s" % e)
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
 
                 # A broken proxy (or client) has caused a fully-qualified FMRI
                 # to be split up.
                 comps = [t for t in tokens]
-                if comps[0] == "pkg:" and comps[1] in pubs:
+                if len(comps) > 1 and comps[0] == "pkg:" and comps[1] in pubs:
                         # Only one slash here as another will be added below.
                         comps[0] += "/"
 
@@ -1036,49 +1275,46 @@
                         # of the fmri and have been split out because of bad
                         # proxy behaviour.
                         pfmri = "/".join(comps)
-                except IndexError:
-                        raise cherrypy.HTTPError(httplib.BAD_REQUEST)
+                        pfmri = fmri.PkgFmri(pfmri, None)
+                        pub = self._get_req_pub()
+                        if not pfmri.publisher:
+                                if not pub:
+                                        pub = self.repo.cfg.get_property(
+                                            "publisher", "prefix")
+                                if pub:
+                                        pfmri.publisher = pub
+                        fpath = self.repo.manifest(pfmri, pub=pub)
+                except (IndexError, fmri.FmriError), e:
+                        raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+                except srepo.RepositoryError, e:
+                        # Treat any remaining repository error as a 404, but
+                        # log the error and include the real failure
+                        # information.
+                        cherrypy.log("Request failed: %s" % str(e))
+                        raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
 
-                try:
-                        f = fmri.PkgFmri(pfmri, None)
-                except fmri.FmriError, e:
-                        # If the FMRI couldn't be parsed for whatever reason,
-                        # assume the client made a bad request.
-                        raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+                if not os.path.exists(fpath):
+                        raise cherrypy.HTTPError(httplib.NOT_FOUND)
 
                 m = manifest.Manifest()
                 m.set_fmri(None, pfmri)
-
-                try:
-                        mpath = os.path.join(self.repo.manifest_root,
-                            f.get_dir_path())
-                except fmri.FmriError, e:
-                        # If the FMRI operation couldn't be performed, assume
-                        # the client made a bad request.
-                        raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+                m.set_content(file(fpath).read())
 
-                if not os.path.exists(mpath):
-                        raise cherrypy.HTTPError(httplib.NOT_FOUND)
-
-                m.set_content(file(mpath).read())
-
-                publisher, name, ver = f.tuple()
-                if not publisher:
-                        publisher = self.repo.cfg.get_property("publisher",
-                            "prefix")
-                        if not publisher:
-                                publisher = ""
-
+                pub, name, ver = pfmri.tuple()
                 summary = m.get("pkg.summary", m.get("description", ""))
 
                 lsummary = cStringIO.StringIO()
                 for i, entry in enumerate(m.gen_actions_by_type("license")):
                         if i > 0:
                                 lsummary.write("\n")
-                        lpath = self.repo.cache_store.lookup(entry.hash)
+                        try:
+                                lpath = self.repo.file(entry.hash, pub=pub)
+                        except srepo.RepositoryFileNotFoundError:
+                                # Skip the license.
+                                continue
 
-                        lfile = file(lpath, "rb")
-                        misc.gunzip_from_stream(lfile, lsummary)
+                        with file(lpath, "rb") as lfile:
+                                misc.gunzip_from_stream(lfile, lsummary)
                 lsummary.seek(0)
 
                 self.__set_response_expires("info", 86400*365, 86400*365)
@@ -1095,27 +1331,9 @@
 
 License:
 %s
-""" % (name, summary, publisher, ver.release, ver.build_release,
+""" % (name, summary, pub, ver.release, ver.build_release,
     ver.branch, ver.get_timestamp().strftime("%c"),
-    misc.bytes_to_str(m.get_size()), f, lsummary.read())
-
-        def __get_publisher(self):
-                rargs = {}
-                for prop in ("collection_type", "description", "legal_uris",
-                    "mirrors", "name", "origins", "refresh_seconds",
-                    "registration_uri", "related_uris"):
-                        rargs[prop] = self.repo.cfg.get_property(
-                            "repository", prop)
-
-                repo = publisher.Repository(**rargs)
-                alias = self.repo.cfg.get_property("publisher", "alias")
-                icas = self.repo.cfg.get_property("publisher",
-                    "intermediate_certs")
-                pfx = self.repo.cfg.get_property("publisher", "prefix")
-                scas = self.repo.cfg.get_property("publisher",
-                    "signing_ca_certs")
-                return publisher.Publisher(pfx, alias=alias,
-                    repositories=[repo], ca_certs=scas, inter_certs=icas)
+    misc.bytes_to_str(m.get_size()), pfmri, lsummary.read())
 
         @cherrypy.tools.response_headers(headers=[(
             "Content-Type", p5i.MIME_TYPE)])
@@ -1123,18 +1341,20 @@
                 """Returns a pkg(5) information datastream based on the
                 repository configuration's publisher information."""
 
-                try:
-                        pub = self.__get_publisher()
-                except Exception, e:
-                        # If the Publisher object creation fails, return a not
-                        # found error to the client so it will treat it as an
-                        # an unsupported operation.
+                prefix = self._get_req_pub()
+                pubs = [
+                   pub for pub in self.repo.get_publishers()
+                   if not prefix or pub.prefix == prefix
+                ]
+                if prefix and not pubs:
+                        # Publisher specified in request is unknown.
+                        e = srepo.RepositoryUnknownPublisher(prefix)
                         cherrypy.log("Request failed: %s" % str(e))
                         raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
 
                 buf = cStringIO.StringIO()
                 try:
-                        p5i.write(buf, [pub])
+                        p5i.write(buf, pubs)
                 except Exception, e:
                         # Treat any remaining error as a 404, but log it and
                         # include the real failure information.
@@ -1146,6 +1366,85 @@
 
         @cherrypy.tools.response_headers(headers=[(
             "Content-Type", p5i.MIME_TYPE)])
+        def publisher_1(self, *tokens):
+                """Returns a pkg(5) information datastream based on
+                the request's publisher or all if not specified."""
+
+                prefix = self._get_req_pub()
+
+                pubs = []
+                if not prefix:
+                        pubs = self.repo.get_publishers()
+                else:
+                        try:
+                                pub = self.repo.get_publisher(prefix)
+                                pubs.append(pub)
+                        except Exception, e:
+                                # If the Publisher object creation fails, return
+                                # a not found error to the client so it will
+                                # treat it as an unsupported operation.
+                                cherrypy.log("Request failed: %s" % str(e))
+                                raise cherrypy.HTTPError(httplib.NOT_FOUND,
+                                    str(e))
+
+                buf = cStringIO.StringIO()
+                try:
+                        p5i.write(buf, pubs)
+                except Exception, e:
+                        # Treat any remaining error as a 404, but log it and
+                        # include the real failure information.
+                        cherrypy.log("Request failed: %s" % str(e))
+                        raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
+                buf.seek(0)
+                self.__set_response_expires("publisher", 86400*365, 86400*365)
+                return buf.getvalue()
+
+        def __get_matching_p5i_data(self, rstore, matcher, pfmri):
+                """Return p5i data for rstore catalog entries matching pfmri,
+                or "" when the publisher, catalog, or matches are absent."""
+                try:
+                        pub = self.repo.get_publisher(rstore.publisher)
+                except srepo.RepositoryUnknownPublisher:
+                        return ""
+
+                try:
+                        cat = rstore.catalog
+                except (srepo.RepositoryMirrorError,
+                    srepo.RepositoryUnsupportedOperationError):
+                        return ""
+
+                try:
+                        matches, unmatched = catalog.extract_matching_fmris(
+                            cat.fmris(), patterns=[pfmri],
+                            constraint=pkg.version.CONSTRAINT_AUTO,
+                            matcher=matcher)
+                except Exception, e:
+                        # If this fails, it's ok to raise an exception since bad
+                        # input was likely provided.
+                        cherrypy.log("Request failed: %s" % e)
+                        raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+
+                if not matches:
+                        return ""
+                elif matcher in (fmri.exact_name_match, fmri.glob_match):
+                        # When using wildcards or exact name match, trim the
+                        # results to only the unique package stems.
+                        matches = sorted(set([m.pkg_name for m in matches]))
+                else:
+                        # Ensure all fmris are output without publisher prefix
+                        # and without scheme.
+                        matches = [
+                            m.get_fmri(anarchy=True, include_scheme=False)
+                            for m in matches
+                        ]
+
+                buf = cStringIO.StringIO()
+                pkg_names = { pub.prefix: matches }
+                p5i.write(buf, [pub], pkg_names=pkg_names)
+                buf.seek(0)
+                return buf.getvalue()
+
+        @cherrypy.tools.response_headers(headers=[(
+            "Content-Type", p5i.MIME_TYPE)])
         def p5i_0(self, *tokens):
                 """Returns a pkg(5) information datastream for the provided full
                 or partial FMRI using the repository configuration's publisher
@@ -1154,16 +1453,15 @@
                 datastream as provided."""
 
                 try:
-                        cat = self.repo.catalog
-                        pubs = cat.publishers()
+                        pubs = self.repo.publishers
                 except Exception, e:
-                        cherrypy.log("Request Failed: %s" % e)
+                        cherrypy.log("Request failed: %s" % e)
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
 
                 # A broken proxy (or client) has caused a fully-qualified FMRI
                 # to be split up.
                 comps = [t for t in tokens]
-                if comps[0] == "pkg:" and comps[1] in pubs:
+                if len(comps) > 1 and comps[0] == "pkg:" and comps[1] in pubs:
                         # Only one slash here as another will be added below.
                         comps[0] += "/"
 
@@ -1197,52 +1495,40 @@
                                 raise cherrypy.HTTPError(httplib.BAD_REQUEST,
                                     str(e))
 
-                # Attempt to find matching entries in the catalog.
-                try:
-                        matches, unmatched = catalog.extract_matching_fmris(cat.fmris(),
-                            patterns=[pfmri], constraint=version.CONSTRAINT_AUTO,
-                            matcher=matcher)
-                except Exception, e:
-                        cherrypy.log("Request Failed: %s" % e)
-                        raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+                output = ""
+                prefix = self._get_req_pub()
+                for rstore in self.repo.rstores:
+                        if not rstore.publisher:
+                                continue
+                        if prefix and prefix != rstore.publisher:
+                                continue
+                        output += self.__get_matching_p5i_data(rstore,
+                            matcher, pfmri)
 
-                if not matches:
+                if output == "":
                         raise cherrypy.HTTPError(httplib.NOT_FOUND, _("No "
-                            "matching FMRI found in repository catalog."))
-                elif matcher in (fmri.exact_name_match, fmri.glob_match):
-                        # When using wildcards or exact name match, trim the
-                        # results to only the unique package stems.
-                        matches = sorted(set([m.pkg_name for m in matches]))
-                else:
-                        # Ensure all fmris are output without publisher prefix
-                        # and without scheme.
-                        matches = [
-                            m.get_fmri(anarchy=True, include_scheme=False)
-                            for m in matches
-                        ]
+                            "matching package found in repository."))
+
+                self.__set_response_expires("p5i", 86400*365, 86400*365)
+                return output
+
+        @cherrypy.tools.response_headers(headers=\
+            [("Content-Type", "application/json; charset=utf-8")])
+        def status_0(self, *tokens):
+                """Return a JSON formatted dictionary containing statistics
+                information for the repository being served."""
+
+                self.__set_response_expires("versions", 5*60, 5*60)
+
+                dump_struct = self.repo.get_status()
 
                 try:
-                        pub = self.__get_publisher()
+                        out = json.dumps(dump_struct, ensure_ascii=False,
+                            indent=2, sort_keys=True)
                 except Exception, e:
-                        # If the Publisher object creation fails, return a not
-                        # found error to the client so it will treat it as an
-                        # unsupported operation.
-                        cherrypy.log("Request failed: %s" % str(e))
-                        raise cherrypy.HTTPError(httplib.NOT_FOUND,
-                            str(e))
-
-                buf = cStringIO.StringIO()
-                try:
-                        pkg_names = { pub.prefix: matches }
-                        p5i.write(buf, [pub], pkg_names=pkg_names)
-                except Exception, e:
-                        # Treat any remaining error as a 404, but log it and
-                        # include the real failure information.
-                        cherrypy.log("Request failed: %s" % str(e))
-                        raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
-                buf.seek(0)
-                self.__set_response_expires("p5i", 86400*365, 86400*365)
-                return buf.getvalue()
+                        raise cherrypy.HTTPError(httplib.NOT_FOUND, _("Unable "
+                            "to generate statistics."))
+                return out + "\n"
 
 
 class NastyDepotHTTP(DepotHTTP):
@@ -1268,9 +1554,8 @@
                     NastyDepotHTTP.nasty_retryable_error)
 
         # Method for CherryPy tool for Nasty Depot
-        @staticmethod
-        def nasty_retryable_error(bonus=0):
-                """A static method that's used by the cherrpy tools,
+        def nasty_retryable_error(self, bonus=0):
+                """A method that's used by the cherrypy tools,
                 and in depot code, to generate a retryable HTTP error."""
 
                 retryable_errors = [httplib.REQUEST_TIMEOUT,
@@ -1289,21 +1574,26 @@
                 the requesting client.  Incremental catalogs are not supported
                 for v0 catalog clients."""
 
-                request = cherrypy.request
-
                 # Response headers have to be setup *outside* of the function
                 # that yields the catalog content.
-                c = self.repo.catalog
+                try:
+                        cat = self.repo.get_catalog(pub=self._get_req_pub())
+                except srepo.RepositoryError, e:
+                        cherrypy.log("Request failed: %s" % str(e))
+                        raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
+
                 response = cherrypy.response
                 response.headers["Content-type"] = "text/plain; charset=utf-8"
-                response.headers["Last-Modified"] = c.last_modified.isoformat()
+                response.headers["Last-Modified"] = \
+                    cat.last_modified.isoformat()
                 response.headers["X-Catalog-Type"] = "full"
 
                 def output():
                         try:
-                                for l in self.repo.catalog_0():
+                                for l in self.repo.catalog_0(
+                                    pub=self._get_req_pub()):
                                         yield l
-                        except repo.RepositoryError, e:
+                        except srepo.RepositoryError, e:
                                 # Can't do anything in a streaming generator
                                 # except log the error and return.
                                 cherrypy.log("Request failed: %s" % str(e))
@@ -1324,16 +1614,19 @@
                 of the catalog and its image policies."""
 
                 try:
-                        cat = self.repo.catalog
-                        pubs = cat.publishers()
+                        pubs = self.repo.publishers
                 except Exception, e:
-                        cherrypy.log("Request Failed: %s" % e)
+                        cherrypy.log("Request failed: %s" % e)
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
 
                 # A broken proxy (or client) has caused a fully-qualified FMRI
                 # to be split up.
                 comps = [t for t in tokens]
-                if comps[0] == "pkg:" and comps[1] in pubs:
+                if not comps:
+                        raise cherrypy.HTTPError(httplib.FORBIDDEN,
+                            _("Directory listing not allowed."))
+
+                if len(comps) > 1 and comps[0] == "pkg:" and comps[1] in pubs:
                         # Only one slash here as another will be added below.
                         comps[0] += "/"
 
@@ -1344,10 +1637,12 @@
                         # of the fmri and have been split out because of bad
                         # proxy behaviour.
                         pfmri = "/".join(comps)
-                        fpath = self.repo.manifest(pfmri)
-                except (IndexError, repo.RepositoryInvalidFMRIError), e:
+                        pfmri = fmri.PkgFmri(pfmri, None)
+                        fpath = self.repo.manifest(pfmri,
+                            pub=self._get_req_pub())
+                except (IndexError, fmri.FmriError), e:
                         raise cherrypy.HTTPError(httplib.BAD_REQUEST, str(e))
-                except repo.RepositoryError, e:
+                except srepo.RepositoryError, e:
                         # Treat any remaining repository error as a 404, but
                         # log the error and include the real failure
                         # information.
@@ -1394,7 +1689,7 @@
                 is output directly to the client. """
 
                 try:
-                        self.repo.inc_flist()
+                        self.flist_requests += 1
 
                         # NASTY
                         if self.need_nasty_occasionally():
@@ -1421,12 +1716,13 @@
                         # closed properly regardless of which thread is
                         # executing.
                         cherrypy.request.hooks.attach("on_end_request",
-                            self._tar_stream_close, failsafe = True)
+                            self._tar_stream_close, failsafe=True)
 
                         # NASTY
                         if self.need_nasty_infrequently():
                                 time.sleep(35)
 
+                        pub = self._get_req_pub()
                         for v in params.values():
 
                                 # NASTY
@@ -1449,10 +1745,10 @@
                                         # Take a nap
                                         time.sleep(35)
 
-                                filepath = self.repo.cache_store.lookup(v)
-
-                                # If file isn't here, skip it
-                                if not os.path.exists(filepath):
+                                try:
+                                        filepath = self.repo.file(v, pub=pub)
+                                except srepo.RepositoryFileNotFoundError:
+                                        # If file isn't here, skip it
                                         continue
 
                                 # NASTY
@@ -1467,7 +1763,7 @@
                                 else:
                                         tar_stream.add(filepath, v, False)
 
-                                self.repo.inc_flist_files()
+                                self.flist_file_requests += 1
 
                         # NASTY
                         # Write garbage into the stream
@@ -1480,9 +1776,7 @@
                                 pick = random.randint(0,
                                     len(self.requested_files) - 1)
                                 extrafn = self.requested_files[pick]
-                                extrapath = self.repo.cache_store.lookup(
-                                    extrafn)
-
+                                extrapath = self.repo.file(extrafn, pub=pub)
                                 tar_stream.add(extrapath, extrafn, False)
 
                         # Flush the remaining bytes to the client.
@@ -1517,8 +1811,8 @@
         }
 
         def __get_bad_path(self, v):
-                return os.path.join(self.repo.cache_store.root,
-                    self.repo.cache_store.layouts[0].lookup(v))
+                fpath = self.repo.file(v, pub=self._get_req_pub())
+                return os.path.join(os.path.dirname(fpath), fpath)
 
         def file_0(self, *tokens):
                 """Outputs the contents of the file, named by the SHA-1 hash
@@ -1530,10 +1824,10 @@
                         fhash = None
 
                 try:
-                        fpath = self.repo.file(fhash)
-                except repo.RepositoryFileNotFoundError, e:
+                        fpath = self.repo.file(fhash, pub=self._get_req_pub())
+                except srepo.RepositoryFileNotFoundError, e:
                         raise cherrypy.HTTPError(httplib.NOT_FOUND, str(e))
-                except repo.RepositoryError, e:
+                except srepo.RepositoryError, e:
                         # Treat any remaining repository error as a 404, but
                         # log the error and include the real failure
                         # information.
@@ -1585,8 +1879,9 @@
                             _("Directory listing not allowed."))
 
                 try:
-                        fpath = self.repo.catalog_1(name)
-                except repo.RepositoryError, e:
+                        fpath = self.repo.catalog_1(name,
+                            pub=self._get_req_pub())
+                except srepo.RepositoryError, e:
                         # Treat any remaining repository error as a 404, but
                         # log the error and include the real failure
                         # information.
@@ -1771,6 +2066,65 @@
                 self.bus.log("Service unregistration for DNS-SD complete.")
 
 
+class BackgroundTaskPlugin(SimplePlugin):
+        """This class allows background task execution for the depot server.  It
+        is designed in such a way as to only allow a single task to be queued
+        for execution at a time.
+        """
+
+        def __init__(self, bus):
+                # Setup the background task queue.
+                SimplePlugin.__init__(self, bus)
+                self.__q = Queue.Queue(1)
+                self.__thread = None
+
+        def put(self, task, *args, **kwargs):
+                """Schedule the given task for background execution if one
+                is not already queued.
+                """
+                if self.__q.unfinished_tasks:
+                        raise Queue.Full()
+                self.__q.put_nowait((task, args, kwargs))
+
+        def run(self):
+                """Run any background task scheduled for execution."""
+                while self.__running:
+                        try:
+                                try:
+                                        # A brief timeout here is necessary
+                                        # to reduce CPU usage and to ensure
+                                        # that shutdown doesn't wait forever
+                                        # for a new task to appear.
+                                        task, args, kwargs = \
+                                            self.__q.get(timeout=.5)
+                                except Queue.Empty:
+                                        continue
+                                task(*args, **kwargs)
+                                if hasattr(self.__q, "task_done"):
+                                        # Task is done; mark it so.
+                                        self.__q.task_done()
+                        except:
+                                self.bus.log("Failure encountered executing "
+                                    "background task %r." % self,
+                                    traceback=True)
+
+        def start(self):
+                """Start the background task plugin."""
+                self.__running = True
+                if not self.__thread:
+                        # Create and start a thread for the caller.
+                        self.__thread = threading.Thread(target=self.run)
+                        self.__thread.start()
+
+        def stop(self):
+                """Stop the background task plugin."""
+                self.__running = False
+                if self.__thread:
+                        # Wait for the thread to terminate.
+                        self.__thread.join()
+                        self.__thread = None
+
+
 class DepotConfig(object):
         """Returns an object representing a configuration interface for a
         a pkg(5) depot server.
--- a/src/modules/server/face.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/server/face.py	Thu Aug 19 23:33:49 2010 -0700
@@ -52,19 +52,19 @@
         pkg.server.feed.init(depot)
         tlookup = mako.lookup.TemplateLookup(directories=[depot.web_root])
 
-def feed(depot, request, response):
+def feed(depot, request, response, pub):
         if depot.repo.mirror:
                 raise cherrypy.HTTPError(httplib.NOT_FOUND,
                     "Operation not supported in current server mode.")
-        if not depot.repo.catalog.updates:
+        if not depot.repo.get_catalog(pub).updates:
                 raise cherrypy.HTTPError(httplib.SERVICE_UNAVAILABLE,
                     "No update history; unable to generate feed.")
-        return pkg.server.feed.handle(depot, request, response)
+        return pkg.server.feed.handle(depot, request, response, pub)
 
-def __render_template(depot, request, path):
+def __render_template(depot, request, path, pub):
         template = tlookup.get_template(path)
-        base = api.BaseInterface(request, depot)
-        return template.render_unicode(g_vars={ "base": base })
+        base = api.BaseInterface(request, depot, pub)
+        return template.render_unicode(g_vars={ "base": base, "pub": pub })
 
 def __handle_error(path, error):
         # All errors are treated as a 404 since reverse proxies such as Apache
@@ -76,15 +76,33 @@
 
         raise cherrypy.NotFound()
 
-def respond(depot, request, response):
+def respond(depot, request, response, pub):
         path = request.path_info.strip("/")
+        if pub and os.path.exists(os.path.join(depot.web_root, pub)):
+                # If an item exists under the web root
+                # with this name, it isn't a publisher
+                # prefix.
+                pub = None
+        elif pub and pub not in depot.repo.publishers:
+                raise cherrypy.NotFound()
+
+        if pub:
+                # Strip publisher from path as it can't be used to determine
+                # resource locations.
+                path = path.replace(pub, "").strip("/")
+        else:
+                # No publisher specified in request, so assume default.
+                pub = depot.repo.cfg.get_property("publisher", "prefix")
+                if not pub:
+                        pub = None
+
         if path == "":
                 path = "index.shtml"
         elif path.split("/")[0] == "feed":
                 response.headers.update({ "Expires": 0, "Pragma": "no-cache",
                     "Cache-Control": "no-cache, no-transform, must-revalidate"
                     })
-                return feed(depot, request, response)
+                return feed(depot, request, response, pub)
 
         if not path.endswith(".shtml"):
                 spath = urllib.unquote(path)
@@ -101,7 +119,7 @@
                 response.headers.update({ "Expires": 0, "Pragma": "no-cache",
                     "Cache-Control": "no-cache, no-transform, must-revalidate"
                     })
-                return __render_template(depot, request, path)
+                return __render_template(depot, request, path, pub)
         except sae.VersionException, e:
                 # The user shouldn't see why we can't render a template, but
                 # the reason should be logged (cleanly).
--- a/src/modules/server/feed.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/server/feed.py	Thu Aug 19 23:33:49 2010 -0700
@@ -38,14 +38,15 @@
 import httplib
 import os
 import rfc822
+import shutil
 import time
 import urllib
 import urlparse
 import uuid
 import xml.dom.minidom as xmini
 
-from pkg.misc import get_rel_path
 import pkg.catalog as catalog
+import pkg.misc as misc
 
 MIME_TYPE = "application/atom+xml"
 CACHE_FILENAME = "feed.xml"
@@ -76,11 +77,8 @@
         for feeds to work correctly.
         """
 
-        repo = depot.repo
-        if repo.feed_cache_root and not \
-            (repo.read_only and not repo.writable_root):
-                # Ensure any configuration changes are reflected in the feed.
-                __clear_cache(repo)
+        # Ensure any configuration changes are reflected in the feed.
+        __clear_cache(depot, None)
 
 def set_title(depot, doc, feed, update_ts):
         """This function attaches the necessary RSS/Atom feed elements needed
@@ -132,40 +130,6 @@
         l.appendChild(lt)
         feed.appendChild(l)
 
-        maintainer = depot.repo.cfg.get_property("repository", "maintainer")
-        # The author information isn't required, but can be useful.
-        if maintainer:
-                name, email = rfc822.AddressList(maintainer).addresslist[0]
-
-                if email and not name:
-                        # If we got an email address, but no name, then
-                        # the name was likely parsed as a local address. In
-                        # that case, assume the whole string is the name.
-                        name = maintainer
-                        email = None
-
-                a = doc.createElement("author")
-
-                # First we have to add a name element. This is required if an
-                # author element exists.
-                n = doc.createElement("name")
-                nt = xmini.Text()
-                nt.replaceWholeText(name)
-                n.appendChild(nt)
-                a.appendChild(n)
-
-                if email:
-                        # If we were able to extract an email address from the
-                        # maintainer information, add the optional email
-                        # element to provide a point of communication.
-                        e = doc.createElement("email")
-                        et = xmini.Text()
-                        et.replaceWholeText(email)
-                        e.appendChild(et)
-                        a.appendChild(e)
-
-                # Done with the author.
-                feed.appendChild(a)
 
 add_op = ("Added", "%s was added to the repository.")
 remove_op = ("Removed", "%s was removed from the repository.")
@@ -223,7 +187,8 @@
         e.appendChild(eu)
 
         # Link to the info output for the given package FMRI.
-        e_uri = get_rel_path(request, "info/0/%s" % urllib.quote(str(pfmri)))
+        e_uri = misc.get_rel_path(request,
+            "info/0/%s" % urllib.quote(str(pfmri)))
 
         l = doc.createElement("link")
         l.setAttribute("rel", "alternate")
@@ -242,12 +207,12 @@
 
         feed.appendChild(e)
 
-def get_updates_needed(repo, ts):
+def get_updates_needed(repo, ts, pub):
         """Returns a list of the CatalogUpdate files that contain the changes
         that have been made to the catalog since the specified UTC datetime
         object 'ts'."""
 
-        c = repo.catalog
+        c = repo.get_catalog(pub)
         if c.last_modified <= ts:
                 # No updates needed.
                 return []
@@ -277,9 +242,9 @@
         # Ensure updates are in chronological ascending order.
         return sorted(updates)
 
-def update(request, depot, last, cf):
+def update(request, depot, last, cf, pub):
         """Generate new Atom document for current updates.  The cached feed
-        file is written to repo.feed_cache_root/CACHE_FILENAME.
+        file is written to depot.tmp_root/CACHE_FILENAME.
         """
 
         # Our configuration is stored in hours, convert it to days and seconds.
@@ -293,7 +258,8 @@
         feed = d.createElementNS("http://www.w3.org/2005/Atom", "feed")
         feed.setAttribute("xmlns", "http://www.w3.org/2005/Atom")
 
-        set_title(depot, d, feed, depot.repo.catalog.last_modified)
+        cat = depot.repo.get_catalog(pub)
+        set_title(depot, d, feed, cat.last_modified)
 
         d.appendChild(feed)
 
@@ -301,15 +267,13 @@
         # in the list of updates so that it can be used to quickly determine if
         # the fmri in the update is a 'new' package or an update to an existing
         # package.
-        c = depot.repo.catalog
-
         first = {}
         def get_first(f):
                 stem = f.get_pkg_stem()
                 if stem in first:
                         return first[stem]
 
-                for v, entries in c.entries_by_version(f.pkg_name):
+                for v, entries in cat.entries_by_version(f.pkg_name):
                         # The first version returned is the oldest version.
                         # Add all of the unique package stems for that version
                         # to the list.
@@ -326,8 +290,8 @@
                 return first[stem]
 
         # Updates should be presented in reverse chronological order.
-        for name in reversed(get_updates_needed(depot.repo, feed_ts)):
-                ulog = catalog.CatalogUpdate(name, meta_root=c.meta_root)
+        for name in reversed(get_updates_needed(depot.repo, feed_ts, pub)):
+                ulog = catalog.CatalogUpdate(name, meta_root=cat.meta_root)
                 for entry in ulog.updates():
                         pfmri = entry[0]
                         op_time = entry[2]
@@ -339,15 +303,16 @@
 
         d.writexml(cf)
 
-def __get_cache_pathname(repo):
-        return os.path.join(repo.feed_cache_root, CACHE_FILENAME)
+def __get_cache_pathname(depot, pub):
+        if not pub:
+                return os.path.join(depot.tmp_root, CACHE_FILENAME)
+        return os.path.join(depot.tmp_root, "publisher", pub, CACHE_FILENAME)
 
-def __clear_cache(repo):
-        if repo.read_only and repo.writable_root:
-                # Ignore the request due to repository configuration.
+def __clear_cache(depot, pub):
+        if not pub:
+                shutil.rmtree(os.path.join(depot.tmp_root, "feed"), True)
                 return
-
-        pathname = __get_cache_pathname(repo)
+        pathname = __get_cache_pathname(depot, pub)
         try:
                 if os.path.exists(pathname):
                         os.remove(pathname)
@@ -356,13 +321,13 @@
                     httplib.INTERNAL_SERVER_ERROR,
                     "Unable to clear feed cache.")
 
-def __cache_needs_update(repo):
+def __cache_needs_update(depot, pub):
         """Checks to see if the feed cache file exists and if it is still
         valid.  Returns False, None if the cache is valid or True, last
         where last is a timestamp representing when the cache was
         generated.
         """
-        cfpath = __get_cache_pathname(repo)
+        cfpath = __get_cache_pathname(depot, pub)
         last = None
         need_update = True
         if os.path.isfile(cfpath):
@@ -372,7 +337,7 @@
                         d = xmini.parse(cfpath)
                 except Exception:
                         d = None
-                        __clear_cache(repo)
+                        __clear_cache(depot, pub)
 
                 # Get the feed element and attempt to get the time we last
                 # generated the feed to determine whether we need to regenerate
@@ -396,27 +361,27 @@
                                 # been created within the same second, we need
                                 # to ignore small variances when determining
                                 # whether to update the feed cache.
-                                up_ts = copy.copy(repo.catalog.last_modified)
+                                cat = depot.repo.get_catalog(pub)
+                                up_ts = copy.copy(cat.last_modified)
                                 up_ts = up_ts.replace(microsecond=0)
                                 if last >= up_ts:
                                         need_update = False
                         else:
-                                __clear_cache(repo)
+                                __clear_cache(depot, pub)
                 else:
-                        __clear_cache(repo)
+                        __clear_cache(depot, pub)
         return need_update, last
 
-def handle(depot, request, response):
+def handle(depot, request, response, pub):
         """If there have been package updates since we last generated the feed,
         update the feed and send it to the client.  Otherwise, send them the
         cached copy if it is available.
         """
 
-        repo = depot.repo
-        cfpath = __get_cache_pathname(repo)
+        cfpath = __get_cache_pathname(depot, pub)
 
         # First check to see if we already have a valid cache of the feed.
-        need_update, last = __cache_needs_update(repo)
+        need_update, last = __cache_needs_update(depot, pub)
 
         if need_update:
                 # Update always looks at feed.window seconds before the last
@@ -425,30 +390,10 @@
                 if last is None:
                         last = datetime.datetime.utcnow()
 
-                if repo.read_only and not repo.writable_root:
-                        # If the server is operating in readonly mode, the
-                        # feed will have to be generated every time.
-                        cf = cStringIO.StringIO()
-                        update(request, depot, last, cf)
-                        cf.seek(0)
-                        buf = cf.read()
-                        cf.close()
-
-                        # Now that the feed has been generated, set the headers
-                        # correctly and return it.
-                        response.headers['Content-type'] = MIME_TYPE
-
-                        # Return the current time and date in GMT.
-                        response.headers['Last-Modified'] = rfc822.formatdate()
-
-                        response.headers['Content-length'] = len(buf)
-                        return buf
-                else:
-                        # If the server isn't operating in readonly mode, the
-                        # feed can be generated and cached in inst_dir.
-                        cf = file(cfpath, "w")
-                        update(request, depot, last, cf)
-                        cf.close()
+                # Generate and cache the feed.
+                misc.makedirs(os.path.dirname(cfpath))
+                cf = file(cfpath, "w")
+                update(request, depot, last, cf, pub)
+                cf.close()
 
         return serve_file(cfpath, MIME_TYPE)
-
--- a/src/modules/server/repository.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/server/repository.py	Thu Aug 19 23:33:49 2010 -0700
@@ -21,36 +21,42 @@
 #
 # Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
 
+import cStringIO
+import codecs
 import datetime
 import errno
+import fcntl
 import logging
 import os
 import os.path
+import platform
 import shutil
-import signal
+import stat
 import sys
 import tempfile
-import threading
 import urllib
 
 import pkg.actions as actions
 import pkg.catalog as catalog
-import pkg.client.api_errors as api_errors
+import pkg.client.api_errors as apx
+import pkg.client.publisher as publisher
 import pkg.config as cfg
 import pkg.file_layout.file_manager as file_manager
 import pkg.fmri as fmri
 import pkg.indexer as indexer
-import pkg.manifest as manifest
+import pkg.manifest
+import pkg.p5i as p5i
 import pkg.portable as portable
 import pkg.misc as misc
-import pkg.pkgsubprocess as subprocess
+import pkg.nrlock
 import pkg.search_errors as se
 import pkg.query_parser as qp
+import pkg.server.catalog as old_catalog
 import pkg.server.query_parser as sqp
 import pkg.server.transaction as trans
-import pkg.version as version
-
-from pkg.misc import EmptyI, EmptyDict
+import pkg.version
+
+CURRENT_REPO_VERSION = 4
 from pkg.pkggzip import PkgGzipFile
 
 class RepositoryError(Exception):
@@ -77,8 +83,8 @@
         """
 
         def __str__(self):
-                return _("A package repository already exists at '%s'.") % \
-                    self.data
+                return _("A package repository (or directory) already exists "
+                    "at '%s'.") % self.data
 
 
 class RepositoryFileNotFoundError(RepositoryError):
@@ -95,6 +101,9 @@
         specified location."""
 
         def __str__(self):
+                if not self.data:
+                        return _("The specified path does not contain a valid "
+                            "package repository.")
                 return _("The path '%s' does not contain a valid package "
                     "repository.") % self.data
 
@@ -103,14 +112,44 @@
         """Used to indicate that the FMRI provided is invalid."""
 
 
+class RepositoryUnqualifiedFMRIError(RepositoryError):
+        """Used to indicate that the FMRI provided is valid, but is missing
+        publisher information."""
+
+        def __str__(self):
+                return _("This operation requires that a default publisher has "
+                    "been set or that a publisher be specified in the FMRI "
+                    "'%s'.") % self.data
+
+
 class RepositoryInvalidTransactionIDError(RepositoryError):
         """Used to indicate that an invalid Transaction ID was supplied."""
 
         def __str__(self):
-                return _("The specified Transaction ID '%s' is invalid.") % \
+                return _("No transaction matching '%s' could be found.") % \
                     self.data
 
 
+class RepositoryLockedError(RepositoryError):
+        """Used to indicate that the repository is currently locked by another
+        thread or process and cannot be modified."""
+
+        def __init__(self, hostname=None, pid=None):
+                RepositoryError.__init__(self)
+                self.hostname = hostname
+                self.pid = pid
+
+        def __str__(self):
+                if self.pid is not None:
+                        # Even if the host is none, use this message.
+                        return _("The repository cannot be modified as it is "
+                            "currently in use by another process: "
+                            "pid %(pid)s on %(host)s.") % {
+                            "pid": self.pid, "host": self.hostname }
+                return _("The repository cannot be modified as it is currently "
+                    "in use by another process.")
+
+
 class RepositoryManifestNotFoundError(RepositoryError):
         """Used to indicate that the requested manifest could not be found."""
 
@@ -128,6 +167,24 @@
                     "repository is used in mirror mode.")
 
 
+class RepositoryNoPublisherError(RepositoryError):
+        """Used to indicate that the requested repository operation could not be
+        completed as no default publisher has been set and one was not
+        specified.
+        """
+
+        def __str__(self):
+                return _("The requested operation could not be completed as a "
+                    "default publisher has not been configured.")
+
+
+class RepositoryNoSuchFileError(RepositoryError):
+        """Used to indicate that the file provided does not exist."""
+
+        def __str__(self):
+                return _("No such file '%s'.") % self.data
+
+
 class RepositoryReadOnlyError(RepositoryError):
         """Used to indicate that the requested operation could not be performed
         as the repository is currently read-only."""
@@ -155,324 +212,137 @@
                 return _("Search functionality is temporarily unavailable.")
 
 
+class RepositoryDuplicatePublisher(RepositoryError):
+        """Raised when the publisher specified for an operation already exists,
+        and so cannot be added again.
+        """
+
+        def __str__(self):
+                return _("Publisher '%s' already exists.") % self.data
+
+
+class RepositoryUnknownPublisher(RepositoryError):
+        """Raised when the publisher specified for an operation is unknown to
+        the repository.
+        """
+
+        def __str__(self):
+                if not self.data:
+                        return _("No publisher was specified or no default "
+                            "publisher has been configured for the repository.")
+                return _("No publisher matching '%s' could be found.") % \
+                    self.data
+
+
+class RepositoryVersionError(RepositoryError):
+        """Raised when the repository specified uses an unsupported format
+        (version).
+        """
+
+        def __init__(self, location, version):
+                RepositoryError.__init__(self)
+                self.location = location
+                self.version = version
+
+        def __str__(self):
+                return("The repository at '%(location)s' is version "
+                    "'%(version)s'; only versions up to %(current)s are "
+                    "supported.") % dict(self.__dict__, current=CURRENT_REPO_VERSION)
+
+
 class RepositoryUnsupportedOperationError(RepositoryError):
         """Raised when the repository is unable to support an operation,
-        based upon its current configuration."""
+        based upon its current configuration.
+        """
 
         def __str__(self):
                 return("Operation not supported for this configuration.")
 
-class RepositoryUpgradeError(RepositoryError):
-        """Used to indicate that the specified repository root cannot be used
-        as the catalog or format of it is an older version that needs to be
-        upgraded before use and cannot be."""
-
-        def __str__(self):
-                return _("The format of the repository or its contents needs "
-                    "to be upgraded before it can be used to serve package "
-                    "data.  However, it is currently read-only and cannot be "
-                    "upgraded.  If using pkg.depotd, please restart the server "
-                    "without read-only so that the repository can be upgraded.")
-
-
-class Repository(object):
-        """A Repository object is a representation of data contained within a
-        pkg(5) repository and an interface to manipulate it."""
-
-        __catalog = None
-        __lock = None
-
-        def __init__(self, auto_create=False, catalog_root=None,
-            cfgpathname=None, file_root=None, fork_allowed=False,
-            index_root=None, log_obj=None, manifest_root=None, mirror=False,
-            properties=EmptyDict, read_only=False, repo_root=None,
-            trans_root=None, refresh_index=True,
+
+class _RepoStore(object):
+        """The _RepoStore object provides an interface for performing operations
+        on a set of package data contained within a repository.  This class is
+        intended only for use by the Repository class.
+        """
+
+        def __init__(self, file_root=None, log_obj=None, mirror=False, pub=None,
+            read_only=False, root=None,
             sort_file_max_size=indexer.SORT_FILE_MAX_SIZE, writable_root=None):
                 """Prepare the repository for use."""
 
                 # This lock is used to protect the repository from multiple
                 # threads modifying it at the same time.
-                self.__lock = threading.Lock()
-
-                self.auto_create = auto_create
-                self.cfg = None
-                self.cfgpathname = None
-                self.fork_allowed = fork_allowed
-                self.log_obj = log_obj
-                self.mirror = mirror
-                self.read_only = read_only
+                self.__lock = pkg.nrlock.NRLock()
+                self.__lockf = None
+
+                self.__catalog = None
+                self.__catalog_root = None
+                self.__file_root = None
+                self.__in_flight_trans = {}
+                self.__root = None
                 self.__sort_file_max_size = sort_file_max_size
                 self.__tmp_root = None
-                self.__file_root = None
-
-                # Set before repo root, since it's possible to have
-                # the file root in an entirely different location.  Repo
-                # root will govern file_root, if an argument to file_root
-                # is not supplied in __init__.
+                self.__writable_root = None
+                self.cache_store = None
+                self.catalog_version = -1
+                self.manifest_root = None
+                self.trans_root = None
+
+                self.log_obj = log_obj
+                self.mirror = mirror
+                self.publisher = pub
+                self.read_only = read_only
+
+                # Set before root, since it's possible to have the
+                # file_root in an entirely different location.  The root
+                # will govern file_root, if a value for file_root is not
+                # supplied.
                 if file_root:
-                        self.file_root = file_root
-
-                # Must be set before most other roots.
-                self.repo_root = repo_root
-
-                # These are all overrides for the default values that setting
-                # repo_root will provide.  If a caller provides one of these,
-                # they are responsible for creating the corresponding path
-                # and setting its mode appropriately.
-                if catalog_root:
-                        self.catalog_root = catalog_root
-                if index_root:
-                        self.index_root = index_root
-                if manifest_root:
-                        self.manifest_root = manifest_root
-                if trans_root:
-                        self.trans_root = trans_root
-
-                # Must be set before writable_root.
-                if self.mirror:
-                        self.__required_dirs = [self.file_root]
-                else:
-                        self.__required_dirs = [self.trans_root,
-                            self.manifest_root, self.catalog_root,
-                            self.file_root]
+                        self.__set_file_root(file_root)
+
+                # Must be set before remaining roots.
+                self.__set_root(root)
 
                 # Ideally, callers would just specify overrides for the feed
                 # cache root, index_root, etc.  But this must be set after all
                 # of the others above.
-                self.writable_root = writable_root
-
-                # Must be set after all other roots.
-                self.__optional_dirs = [self.index_root]
-
-                # Stats
-                self.catalog_requests = 0
-                self.manifest_requests = 0
-                self.file_requests = 0
-                self.flist_requests = 0
-                self.flist_files = 0
-                self.pkgs_renamed = 0
-
-                # The update_handle lock protects the update_handle variable.
-                # This allows update_handle to be checked and acted on in a
-                # consistent step, preventing the dropping of needed updates.
-                # The check at the top of refresh index should always be done
-                # prior to deciding to spin off a process for indexing as it
-                # prevents more than one indexing process being run at the same
-                # time.
-                self.__searchdb_update_handle_lock = threading.Lock()
-
-                if os.name == "posix" and self.fork_allowed:
-                        try:
-                                signal.signal(signal.SIGCHLD,
-                                    self._child_handler)
-                        except ValueError:
-                                self.__log("Tried to create signal handler in "
-                                    "a thread other than the main thread.")
-
-                self.__searchdb_update_handle = None
+                self.__set_writable_root(writable_root)
+
                 self.__search_available = False
-                self.__deferred_searchdb_updates = []
-                self.__deferred_searchdb_updates_lock = threading.Lock()
                 self.__refresh_again = False
 
                 # Initialize.
-                self.__lock_repository()
-                try:
-                        self.__init_config(cfgpathname=cfgpathname,
-                            properties=properties)
-                        self.__init_dirs()
-                        self.__init_state(refresh_index=refresh_index)
-                finally:
-                        self.__unlock_repository()
-
-        def _child_handler(self, sig, frame):
-                """ Handler method for the SIGCHLD signal.  Checks to see if the
-                search database update child has finished, and enables searching
-                if it finished successfully, or logs an error if it didn't.
-                """
-
+                self.__lock_rstore()
                 try:
-                        signal.signal(signal.SIGCHLD, self._child_handler)
-                except ValueError:
-                        self.__log("Tried to create signal handler in a thread "
-                            "other than the main thread.")
-
-                # If there's no update_handle, then another subprocess was
-                # spun off and that was what finished. If the poll() returns
-                # None, then while the indexer was running, another process
-                # that was spun off finished.
-                rval = None
-                if not self.__searchdb_update_handle:
-                        return
-                rval = self.__searchdb_update_handle.poll()
-                if rval == None:
-                        return
-
-                if rval == 0:
-                        self.__search_available = True
-                        self.__index_log("Search indexes updated and "
-                            "available.")
-                        # Need to acquire this lock to prevent the possibility
-                        # of a race condition with refresh_index where a needed
-                        # refresh is dropped. It is possible that an extra
-                        # refresh will be done with this code, but that refresh
-                        # should be very quick to finish.
-                        self.__searchdb_update_handle_lock.acquire()
-                        self.__searchdb_update_handle = None
-                        self.__searchdb_update_handle_lock.release()
-
-                        if self.__refresh_again:
-                                self.__refresh_again = False
-                                self.refresh_index()
-                elif rval > 0:
-                        # If the refresh of the index failed, defensively
-                        # declare that search is unavailable.
-                        self.__index_log("ERROR building search database, exit "
-                            "code: %s" % rval)
-                        try:
-                                self.__log(
-                                    self.__searchdb_update_handle.stderr.read())
-                                self.__searchdb_update_handle.stderr.read()
-                        except KeyboardInterrupt:
-                                raise
-                        except:
-                                pass
-                        self.__searchdb_update_handle_lock.acquire()
-                        self.__searchdb_update_handle = None
-                        self.__searchdb_update_handle_lock.release()
+                        self.__init_state()
+                finally:
+                        self.__unlock_rstore()
 
         def __mkdtemp(self):
                 """Create a temp directory under repository directory for
                 various purposes."""
 
-                if not self.repo_root:
+                if not self.root:
                         return
 
-                root = self.repo_root
                 if self.writable_root:
                         root = self.writable_root
+                else:
+                        root = self.root
 
                 tempdir = os.path.normpath(os.path.join(root, "tmp"))
+                misc.makedirs(tempdir)
                 try:
-                        if not os.path.exists(tempdir):
-                                os.makedirs(tempdir)
                         return tempfile.mkdtemp(dir=tempdir)
                 except EnvironmentError, e:
                         if e.errno == errno.EACCES:
-                                raise api_errors.PermissionsException(
+                                raise apx.PermissionsException(
                                     e.filename)
                         if e.errno == errno.EROFS:
-                                raise api_errors.ReadOnlyFileSystemException(
+                                raise apx.ReadOnlyFileSystemException(
                                     e.filename)
                         raise
 
-        def __upgrade(self):
-                """Upgrades the repository's format and contents if needed."""
-
-                def get_file_lm(pathname):
-                        try:
-                                mod_time = os.stat(pathname).st_mtime
-                        except EnvironmentError, e:
-                                if e.errno == errno.ENOENT:
-                                        return None
-                                raise
-                        return datetime.datetime.utcfromtimestamp(mod_time)
-
-                if not self.catalog_root:
-                        return
-
-                # To determine if an upgrade is needed, first check for a v0
-                # catalog attrs file.
-                need_upgrade = False
-                v0_attrs = os.path.join(self.catalog_root, "attrs")
-
-                # The only place a v1 catalog should exist, at all,
-                # is either in self.catalog_root, or in a subdirectory
-                # of self.writable_root if a v0 catalog exists.
-                v1_cat = None
-                writ_cat_root = None
-                if self.writable_root:
-                        writ_cat_root = os.path.join(
-                            self.writable_root, "catalog")
-                        v1_cat = catalog.Catalog(
-                            meta_root=writ_cat_root, read_only=True)
-
-                v0_lm = None
-                if os.path.exists(v0_attrs):
-                        # If a v0 catalog exists, then assume any existing v1
-                        # catalog needs to be kept in sync if it exists.  If
-                        # one doesn't exist, then it needs to be created.
-                        v0_lm = get_file_lm(v0_attrs)
-                        if not v1_cat or not v1_cat.exists or \
-                            v0_lm != v1_cat.last_modified:
-                                need_upgrade = True
-
-                if writ_cat_root and not self.read_only:
-                        # If a writable root was specified, but the server is
-                        # not in read-only mode, then the catalog must not be
-                        # stored using the writable root (this is consistent
-                        # with the storage of package data in this case).  As
-                        # such, destroy any v1 catalog data that might exist
-                        # and proceed.
-                        shutil.rmtree(writ_cat_root, True)
-                        writ_cat_root = None
-                        if os.path.exists(v0_attrs) and not self.catalog.exists:
-                                # A v0 catalog exists, but no v1 catalog exists;
-                                # this can happen when a repository that was
-                                # previously run with --writable-root and
-                                # --readonly is now being run with only
-                                # --writable-root.
-                                need_upgrade = True
-                elif writ_cat_root and v0_lm and self.read_only:
-                        # The catalog lives in the writable_root if a v0 catalog
-                        # exists, writ_cat_root is set, and readonly is True.
-                        self.catalog_root = writ_cat_root
-
-                if not need_upgrade or self.mirror:
-                        # If an upgrade isn't needed, or this is a mirror, then
-                        # nothing more should be done to the existing catalog
-                        # data.
-                        return
-
-                if self.read_only and not self.writable_root:
-                        # Any further operations would attempt to alter the
-                        # existing catalog data, which can't be done due to
-                        # read_only status.
-                        raise RepositoryUpgradeError()
-
-                if self.catalog.exists:
-                        # v1 catalog should be destroyed if it exists already.
-                        self.catalog.destroy()
-                elif writ_cat_root and not os.path.exists(writ_cat_root):
-                        try:
-                                os.mkdir(writ_cat_root, misc.PKG_DIR_MODE)
-                        except EnvironmentError, e:
-                                if e.errno == errno.EACCES:
-                                        raise api_errors.PermissionsException(
-                                            e.filename)
-                                if e.errno == errno.EROFS:
-                                        raise api_errors.ReadOnlyFileSystemException(
-                                            e.filename)
-                                raise
-
-                # To upgrade the repository, the catalog will have to
-                # be rebuilt.
-                self.__log(_("Upgrading repository; this process will "
-                    "take some time."))
-                self.__rebuild(lm=v0_lm)
-
-                if not self.read_only and self.repo_root:
-                        v0_cat = os.path.join(self.repo_root, "catalog",
-                            "catalog")
-                        for f in v0_attrs, v0_cat:
-                                if os.path.exists(f):
-                                        portable.remove(f)
-
-                        # If this fails, it doesn't really matter, but it should
-                        # be removed if possible.
-                        shutil.rmtree(os.path.join(self.repo_root, "updatelog"),
-                            True)
-
         def __add_package(self, pfmri, manifest=None):
                 """Private version; caller responsible for repository
                 locking."""
@@ -506,8 +376,17 @@
                 except se.InconsistentIndexException:
                         pass
                 if cie:
+                        if not self.__search_available:
+                                # State change to available.
+                                self.__index_log("Search Available")
+                                self.reset_search()
                         self.__search_available = True
-                        self.__index_log("Search Available")
+                else:
+                        if self.__search_available:
+                                # State change to unavailable.
+                                self.__index_log("Search Unavailable")
+                                self.reset_search()
+                        self.__search_available = False
 
         def __destroy_catalog(self):
                 """Destroy the catalog."""
@@ -517,13 +396,13 @@
                         shutil.rmtree(self.catalog_root)
 
         @staticmethod
-        def __fmri_from_path(pkg, ver):
+        def __fmri_from_path(pkgpath, ver):
                 """Helper method that takes the full path to the package
                 directory and the name of the manifest file, and returns an FMRI
                 constructed from the information in those components."""
 
-                v = version.Version(urllib.unquote(ver), None)
-                f = fmri.PkgFmri(urllib.unquote(os.path.basename(pkg)))
+                v = pkg.version.Version(urllib.unquote(ver), None)
+                f = fmri.PkgFmri(urllib.unquote(os.path.basename(pkgpath)))
                 f.version = v
                 return f
 
@@ -532,7 +411,7 @@
                 to its usage as a callback."""
 
                 mpath = self.manifest(pfmri)
-                m = manifest.Manifest()
+                m = pkg.manifest.Manifest()
                 try:
                         f = open(mpath, "rb")
                         content = f.read()
@@ -543,202 +422,298 @@
                                     e.filename)
                         raise
                 m.set_fmri(None, pfmri)
-                m.set_content(content, EmptyI, signatures=sig)
+                m.set_content(content, misc.EmptyI, signatures=sig)
                 return m
 
-        def __get_catalog_root(self):
-                return self.__catalog_root
-
-        def __get_repo_root(self):
-                return self.__repo_root
-
-        def __get_writable_root(self):
-                return self.__writable_root
-
         def __index_log(self, msg):
                 return self.__log(msg, "INDEX")
 
-        def __init_config(self, cfgpathname=None, properties=EmptyDict):
-                """Private helper function to initialize configuration."""
-
-                # Load configuration information.
-                if not cfgpathname:
-                        cfgpathname = self.cfgpathname
-                self.__load_config(cfgpathname, properties=properties)
-
-                # Set any specified properties again for cases where no existing
-                # configuration could be loaded and so that individual property
-                # validation messages can be re-raised to caller.
-                for section in properties:
-                        for prop, value in properties[section].iteritems():
-                                self.cfg.set_property(section, prop, value)
-
-        def __init_dirs(self, create=False):
-                """Verify and instantiate repository directory structure."""
-                if not self.repo_root:
-                        return
-                emsg = _("repository directories incomplete")
-                for d in self.__required_dirs + self.__optional_dirs:
-                        if create or self.auto_create or (self.writable_root and
-                            d.startswith(self.writable_root)):
-                                try:
-                                        os.makedirs(d)
-                                except EnvironmentError, e:
-                                        if e.errno in (errno.EACCES,
-                                            errno.EROFS):
-                                                emsg = _("repository "
-                                                    "directories not writeable "
-                                                    "by current user id or "
-                                                    "group and are incomplete")
-                                        elif e.errno != errno.EEXIST:
-                                                raise
-
-                for d in self.__required_dirs:
-                        if not os.path.exists(d):
-                                if create or self.auto_create:
-                                        raise RepositoryError(emsg)
-                                raise RepositoryInvalidError(self.repo_root)
-
-                searchdb_file = os.path.join(self.repo_root, "search")
-                for ext in ".pag", ".dir":
-                        try:
-                                os.unlink(searchdb_file + ext)
-                        except OSError:
-                                # If these can't be removed, it doesn't matter.
-                                continue
-
-        def __load_config(self, cfgpathname=None, properties=EmptyDict):
-                """Load stored configuration data and configure the repository
-                appropriately."""
-
-                if not self.repo_root:
-                        self.cfg = RepositoryConfig(target=cfgpathname,
-                            overrides=properties)
-                        return
-
-                default_cfg_path = False
-
-                # Now load our repository configuration / metadata.
-                if not cfgpathname:
-                        cfgpathname = os.path.join(self.repo_root,
-                            "cfg_cache")
-                        default_cfg_path = True
-
-                # Create or load the repository configuration.
-                self.cfg = RepositoryConfig(target=cfgpathname,
-                    overrides=properties)
-
-                self.cfgpathname = cfgpathname
-
-        def __load_in_flight(self):
-                """Walk trans_root, acquiring valid transaction IDs."""
-
-                if self.mirror:
-                        # Mirrors don't permit publication.
-                        return
+        def __get_transaction(self, trans_id):
+                """Return the in-flight transaction with the matching trans_id.
+                """
 
                 if not self.trans_root:
-                        return
-
-                self.__in_flight_trans = {}
-                for txn in os.walk(self.trans_root):
-                        if txn[0] == self.trans_root:
-                                continue
+                        raise RepositoryInvalidTransactionIDError(trans_id)
+
+                try:
+                        return self.__in_flight_trans[trans_id]
+                except KeyError:
+                        # Transaction not cached already, so load and
+                        # cache if possible.
                         t = trans.Transaction()
-                        t.reopen(self, txn[0])
-                        self.__in_flight_trans[t.get_basename()] = t
-
-        def __lock_repository(self):
+                        try:
+                                t.reopen(self, trans_id)
+                        except trans.TransactionUnknownIDError:
+                                raise RepositoryInvalidTransactionIDError(
+                                    trans_id)
+
+                        if not t:
+                                raise RepositoryInvalidTransactionIDError(
+                                    trans_id)
+                        self.__in_flight_trans[trans_id] = t
+                        return t
+
+        def __discard_transaction(self, trans_id):
+                """Discard any state information cached for a Transaction."""
+                self.__in_flight_trans.pop(trans_id, None)
+
+        def get_lock_status(self):
+                """Returns a tuple of booleans of the form (storage_locked,
+                index_locked).
+                """
+
+                storage_locked = False
+                try:
+                        self.__lock_rstore(blocking=False)
+                except RepositoryLockedError:
+                        storage_locked = True
+                except:
+                        pass
+                else:
+                        self.__unlock_rstore()
+
+                index_locked = False
+                if self.index_root and os.path.exists(self.index_root) and \
+                    (not self.read_only or self.writable_root):
+                        ind = None
+                        try:
+                                ind = indexer.Indexer(self.index_root,
+                                    self._get_manifest, self.manifest,
+                                    log=self.__index_log, sort_file_max_size=self.__sort_file_max_size)
+                                ind.lock()
+                        except se.IndexLockedException:
+                                index_locked = True
+                        except:
+                                pass
+                        finally:
+                                if ind and not index_locked:
+                                        # If ind is defined, the index exists,
+                                        # and a lock was obtained because
+                                        # index_locked is False, so call
+                                        # unlock().
+                                        ind.unlock()
+                return storage_locked, index_locked
+
+        def get_status(self):
+                """Return a dictionary of status information about the
+                repository storage object.
+                """
+
+                try:
+                        cat = self.catalog
+                        pkg_count = cat.package_count
+                        pkg_ver_count = cat.package_version_count
+                        lcat_update = catalog.datetime_to_basic_ts(
+                            cat.last_modified)
+                except:
+                        # Can't get the info, drive on.
+                        pkg_count = 0
+                        pkg_ver_count = 0
+                        lcat_update = ""
+
+                storage_locked, index_locked = self.get_lock_status()
+                if storage_locked:
+                        rstatus = "processing"
+                elif index_locked:
+                        rstatus = "indexing"
+                else:
+                        rstatus = "online"
+
+                return {
+                    "package-count": pkg_count,
+                    "package-version-count": pkg_ver_count,
+                    "last-catalog-update": lcat_update,
+                    "status": rstatus,
+                }
+
+        def __lock_rstore(self, blocking=True, process=True):
                 """Locks the repository preventing multiple consumers from
                 modifying it during operations."""
 
-                # XXX need filesystem lock too?
-                self.__lock.acquire()
+                # First, attempt to obtain a thread lock.
+                if not self.__lock.acquire(blocking=blocking):
+                        raise RepositoryLockedError()
+
+                if not process or (self.read_only and
+                    not self.writable_root) or not (self.__tmp_root and
+                    os.path.exists(self.__tmp_root)):
+                        # Process lock wasn't desired, or repository structure
+                        # doesn't exist yet or is readonly so a file lock cannot
+                        # be obtained.
+                        return
+
+                try:
+                        # Attempt to obtain a file lock.
+                        self.__lockf = self.__lock_process(self.__tmp_root,
+                            blocking=blocking)
+                except:
+                        # If process lock fails, ensure thread lock is released.
+                        self.__lock.release()
+                        raise
+
+        @staticmethod
+        def __lock_process(lock_dir, blocking=True):
+                """Locks the repository to prevent modification by other
+                processes."""
+
+                # Attempt to obtain a file lock for the repository.
+                lfpath = os.path.join(lock_dir, "lock")
+
+                lock_type = fcntl.LOCK_EX
+                if not blocking:
+                        lock_type |= fcntl.LOCK_NB
+
+                # Attempt an initial open of the lock file.
+                lf = None
+                try:
+                        lf = open(lfpath, "ab+")
+                except EnvironmentError, e:
+                        if e.errno == errno.EACCES:
+                                raise apx.PermissionsException(e.filename)
+                        if e.errno == errno.EROFS:
+                                raise apx.ReadOnlyFileSystemException(
+                                    e.filename)
+                        raise
+
+                # Attempt to lock the file.
+                try:
+                        fcntl.lockf(lf, lock_type)
+                except IOError, e:
+                        if e.errno not in (errno.EAGAIN, errno.EACCES):
+                                raise
+
+                        # If the lock failed (because it is likely contended),
+                        # then extract the information about the lock acquirer
+                        # and raise an exception.
+                        pid_data = lf.read().strip()
+                        pid, hostname, lock_ts = pid_data.split("\n", 3)
+                        raise RepositoryLockedError(pid=pid, hostname=hostname)
+
+                # Store lock time as ISO-8601 basic UTC timestamp in lock file.
+                lock_ts = catalog.now_to_basic_ts()
+
+                # Store information about the lock acquirer and write it.
+                try:
+                        lf.truncate(0)
+                        lf.write("\n".join((str(os.getpid()),
+                            platform.node(), lock_ts, "\n")))
+                        lf.flush()
+                        return lf
+                except EnvironmentError, e:
+                        if e.errno == errno.EACCES:
+                                raise apx.PermissionsException(
+                                    e.filename)
+                        if e.errno == errno.EROFS:
+                                raise apx.ReadOnlyFileSystemException(
+                                    e.filename)
+                        raise
 
         def __log(self, msg, context="", severity=logging.INFO):
                 if self.log_obj:
                         self.log_obj.log(msg=msg, context=context,
                             severity=severity)
 
-        def __rebuild(self, lm=None, incremental=False):
+        def __rebuild(self, build_catalog=True, build_index=False, lm=None,
+            incremental=False):
                 """Private version; caller responsible for repository
                 locking."""
 
-                if not self.manifest_root:
+                if not (build_catalog or build_index) or not self.manifest_root:
+                        # Nothing to do.
                         return
 
-                default_pub = self.cfg.get_property("publisher", "prefix")
-
-                if self.read_only:
-                        # Temporarily mark catalog as not read-only so that it
-                        # can be modified.
-                        self.catalog.read_only = False
-
-                # Set batch_mode for catalog to speed up rebuild.
-                self.catalog.batch_mode = True
-
-                # Pointless to log incremental updates since a new catalog
-                # is being built.  This also helps speed up rebuild.
-                self.catalog.log_updates = incremental
-
-                def add_package(f):
-                        m = self._get_manifest(f, sig=True)
-                        if "pkg.fmri" in m:
-                                f = fmri.PkgFmri(m["pkg.fmri"])
-                        if default_pub and not f.publisher:
-                                f.publisher = default_pub
-                        self.__add_package(f, manifest=m)
-                        self.__log(str(f))
-
-                # XXX eschew os.walk in favor of another os.listdir here?
-                for pkg in os.walk(self.manifest_root):
-                        if pkg[0] == self.manifest_root:
-                                continue
-
-                        for e in os.listdir(pkg[0]):
-                                f = self.__fmri_from_path(pkg[0], e)
-                                try:
-                                        add_package(f)
-                                except (api_errors.InvalidPackageErrors,
-                                    actions.ActionError), e:
-                                        # Don't add packages with corrupt
-                                        # manifests to the catalog.
-                                        self.__log(_("Skipping %(fmri)s; "
-                                            "invalid manifest: %(error)s") % {
-                                            "fmri": f, "error": e })
-                                except api_errors.DuplicateCatalogEntry, e:
-                                        # ignore dups if incremental mode
-                                        if incremental:
-                                                continue
-                                        raise
-
-                # Private add_package doesn't automatically save catalog
-                # so that operations can be batched (there is significant
-                # overhead in writing the catalog).
-                self.catalog.batch_mode = False
-                self.catalog.log_updates = True
-                self.catalog.read_only = self.read_only
-                self.catalog.finalize()
-                self.__save_catalog(lm=lm)
+                if build_catalog:
+                        self.__destroy_catalog()
+                        default_pub = self.publisher
+                        if self.read_only:
+                                # Temporarily mark catalog as not read-only so
+                                # that it can be modified.
+                                self.catalog.read_only = False
+
+                        # Set batch_mode for catalog to speed up rebuild.
+                        self.catalog.batch_mode = True
+
+                        # Pointless to log incremental updates since a new 
+                        # catalog is being built.  This also helps speed up
+                        # rebuild.
+                        self.catalog.log_updates = incremental
+
+                        def add_package(f):
+                                m = self._get_manifest(f, sig=True)
+                                if "pkg.fmri" in m:
+                                        f = fmri.PkgFmri(m["pkg.fmri"])
+                                if default_pub and not f.publisher:
+                                        f.publisher = default_pub
+                                self.__add_package(f, manifest=m)
+                                self.__log(str(f))
+
+                        # XXX eschew os.walk in favor of another os.listdir
+                        # here?
+                        for pkgpath in os.walk(self.manifest_root):
+                                if pkgpath[0] == self.manifest_root:
+                                        continue
+
+                                for e in os.listdir(pkgpath[0]):
+                                        f = self.__fmri_from_path(pkgpath[0], e)
+                                        try:
+                                                add_package(f)
+                                        except (apx.InvalidPackageErrors,
+                                            actions.ActionError), e:
+                                                # Don't add packages with
+                                                # corrupt manifests to the
+                                                # catalog.
+                                                self.__log(_("Skipping "
+                                                    "%(fmri)s; invalid "
+                                                    "manifest: %(error)s") % {
+                                                    "fmri": f, "error": e })
+                                        except apx.DuplicateCatalogEntry, e:
+                                                # Raise dups if not in
+                                                # incremental mode.
+                                                if not incremental:
+                                                        raise
+
+                        # Private add_package doesn't automatically save catalog
+                        # so that operations can be batched (there is
+                        # significant overhead in writing the catalog).
+                        self.catalog.batch_mode = False
+                        self.catalog.log_updates = True
+                        self.catalog.read_only = self.read_only
+                        self.catalog.finalize()
+                        self.__save_catalog(lm=lm)
 
                 if not incremental and self.index_root and \
                     os.path.exists(self.index_root):
-                        # Search data is no longer valid and search can't handle
-                        # package removal, so discard existing search data.
+                        # Only discard search data if this isn't an incremental
+                        # rebuild, and there's an index to destroy.
+                        ind = indexer.Indexer(self.index_root,
+                            self._get_manifest,
+                            self.manifest,
+                            log=self.__index_log,
+                            sort_file_max_size=self.__sort_file_max_size)
+                        ind.lock(blocking=False)
                         try:
                                 shutil.rmtree(self.index_root)
                         except EnvironmentError, e:
                                 if e.errno == errno.EACCES:
-                                        raise api_errors.PermissionsException(
+                                        raise apx.PermissionsException(
                                             e.filename)
                                 if e.errno == errno.EROFS:
-                                        raise api_errors.ReadOnlyFileSystemException(
+                                        raise apx.ReadOnlyFileSystemException(
                                             e.filename)
                                 if e.errno != errno.ENOENT:
                                         raise
-                self.__init_dirs(create=True)
-
-        def __refresh_index(self, synchronous=False):
+                        finally:
+                                ind.unlock()
+
+                        # Discard in-memory search data.
+                        self.reset_search()
+
+                if build_index:
+                        self.__refresh_index()
+                else:
+                        self.__check_search()
+
+        def __refresh_index(self):
                 """Private version; caller responsible for repository
                 locking."""
 
@@ -747,80 +722,28 @@
                 if self.read_only and not self.writable_root:
                         raise RepositoryReadOnlyError()
 
-                self.__searchdb_update_handle_lock.acquire()
-
-                if self.__searchdb_update_handle:
-                        self.__refresh_again = True
-                        self.__searchdb_update_handle_lock.release()
-                        return
-
                 cat = self.catalog
-                forked = False
-
-                try:
-                        fmris_to_index = indexer.Indexer.check_for_updates(
-                            self.index_root, cat)
-
-                        pub = self.cfg.get_property("publisher", "prefix")
-                        if fmris_to_index:
-                                if os.name == "posix" and self.fork_allowed:
-                                        cmd = self.__whence(sys.argv[0])
-                                        args = (sys.executable, cmd,
-                                            "--refresh-index", "-d",
-                                            self.repo_root)
-                                        if pub:
-                                                args += ("--set-property",
-                                                    "publisher.prefix=%s" % pub)
-                                        if os.path.normpath(
-                                            self.index_root) != \
-                                            os.path.normpath(os.path.join(
-                                            self.repo_root, "index")):
-                                                writ, t = os.path.split(
-                                                    self.index_root)
-                                                args += ("--writable-root",
-                                                    writ)
-                                        if self.read_only:
-                                                args += ("--readonly",)
-                                        try:
-                                                self.__searchdb_update_handle = \
-                                                    subprocess.Popen(args,
-                                                    stderr=subprocess.STDOUT)
-                                        except Exception, e:
-                                                self.__log("Starting the "
-                                                    "indexing process failed: "
-                                                    "%s" % e)
-                                                raise
-                                        forked = True
-                                else:
-                                        self.run_update_index()
-                        else:
-                                # Since there is nothing to index, setup
-                                # the index and declare search available.
-                                # We only log this if this represents
-                                # a change in status of the server.
-                                ind = indexer.Indexer(self.index_root,
-                                    self._get_manifest,
-                                    self.manifest,
-                                    log=self.__index_log,
-                                    sort_file_max_size=self.__sort_file_max_size)
-                                ind.setup()
-                                if not self.__search_available:
-                                        self.__index_log("Search Available")
-                                self.__search_available = True
-                finally:
-                        self.__searchdb_update_handle_lock.release()
-                        if forked and synchronous:
-                                while self.__searchdb_update_handle is not None:
-                                        try:
-                                                self.__searchdb_update_handle.wait()
-                                                self.__searchdb_update_handle = None
-                                        except OSError, e:
-                                                if e.errno == errno.EINTR:
-                                                        continue
-                                                break
-
-
-        def __init_state(self, refresh_index=True):
+                self.__index_log("Checking for updated package data.")
+                fmris_to_index = indexer.Indexer.check_for_updates(
+                    self.index_root, cat)
+
+                if fmris_to_index:
+                        return self.__run_update_index()
+
+                # Since there is nothing to index, setup the index
+                # and declare search available.  This is only logged
+                # if this represents a change in status of the server.
+                ind = indexer.Indexer(self.index_root,
+                    self._get_manifest,
+                    self.manifest,
+                    log=self.__index_log,
+                    sort_file_max_size=self.__sort_file_max_size)
+                ind.setup()
+                if not self.__search_available:
+                        self.__index_log("Search Available")
+                self.__search_available = True
+
+        def __init_state(self):
                 """Private version; caller responsible for repository
                 locking."""
 
@@ -828,57 +751,158 @@
                 # when needed).
                 self.__catalog = None
 
-                # Load in-flight transaction information.
-                self.__load_in_flight()
-
-                # Ensure default configuration is written.
-                self.__write_config()
-
-                # Ensure repository state is current before attempting
-                # to load it.
-                self.__upgrade()
-
-                if self.mirror or not self.repo_root:
-                        # In mirror-mode, or no repo_root, nothing to do.
+                # Determine location and version of catalog data.
+                self.__init_catalog()
+
+                # Prepare search for use (ensuring most current data is loaded).
+                self.reset_search()
+
+                if self.mirror:
+                        # In mirror mode, nothing more to do.
                         return
 
                 # If no catalog exists on-disk yet, ensure an empty one does
                 # so that clients can discern that a repository has an empty
-                # empty catalog, as opposed to missing one entirely (which
-                # could easily happen with multiple origins).  This must be
-                # done before the search checks below.
-                if not self.read_only and not self.catalog.exists:
-                        self.catalog.save()
-
-                if refresh_index and not self.read_only or self.writable_root:
+                # catalog, as opposed to missing one entirely (which could
+                # easily happen with multiple origins).  This must be done
+                # before the search checks below.
+                if not self.read_only and self.catalog_root and \
+                    not self.catalog.exists:
+                        self.__save_catalog()
+
+                self.__check_search()
+
+        def __init_catalog(self):
+                """Private function to determine version and location of
+                catalog data.  This will also perform any necessary
+                transformations of existing catalog data if the repository
+                is read-only and a writable_root has been provided.
+                """
+
+                # Reset versions to default.
+                self.catalog_version = -1
+
+                if not self.catalog_root or self.mirror:
+                        return
+
+                def get_file_lm(pathname):
                         try:
-                                try:
-                                        self.__refresh_index()
-                                except se.InconsistentIndexException, e:
-                                        s = _("Index corrupted or out of date. "
-                                            "Removing old index directory (%s) "
-                                            " and rebuilding search "
-                                            "indexes.") % e.cause
-                                        self.__log(s, "INDEX")
-                                        shutil.rmtree(self.index_root)
-                                        try:
-                                                self.__refresh_index()
-                                        except se.IndexingException, e:
-                                                self.__log(str(e), "INDEX")
-                                except se.IndexingException, e:
-                                        self.__log(str(e), "INDEX")
+                                mod_time = os.stat(pathname).st_mtime
                         except EnvironmentError, e:
-                                if e.errno in (errno.EACCES, errno.EROFS):
-                                        if self.writable_root:
-                                                raise RepositoryError(
-                                                    _("writable root not "
-                                                    "writable by current user "
-                                                    "id or group."))
-                                        raise RepositoryError(_("unable to "
-                                            "write to index directory."))
+                                if e.errno == errno.ENOENT:
+                                        return None
                                 raise
-                else:
-                        self.__check_search()
+                        return datetime.datetime.utcfromtimestamp(mod_time)
+
+                # To determine if a transformation is needed, first check for a
+                # v0 catalog attrs file.
+                need_transform = False
+                v0_attrs = os.path.join(self.catalog_root, "attrs")
+
+                # The only place a v1 catalog should exist, at all,
+                # is either in self.catalog_root, or in a subdirectory
+                # of self.writable_root if a v0 catalog exists.
+                v1_cat = None
+                writ_cat_root = None
+                if self.writable_root:
+                        writ_cat_root = os.path.join(
+                            self.writable_root, "catalog")
+                        v1_cat = catalog.Catalog(
+                            meta_root=writ_cat_root, read_only=True)
+
+                v0_lm = None
+                if os.path.exists(v0_attrs):
+                        # If a v0 catalog exists, then assume any existing v1
+                        # catalog needs to be kept in sync if it exists.  If
+                        # one doesn't exist, then it needs to be created.
+                        v0_lm = get_file_lm(v0_attrs)
+                        if not v1_cat or not v1_cat.exists or \
+                            v0_lm != v1_cat.last_modified:
+                                need_transform = True
+
+                if writ_cat_root and not self.read_only:
+                        # If a writable root was specified, but the server is
+                        # not in read-only mode, then the catalog must not be
+                        # stored using the writable root (this is consistent
+                        # with the storage of package data in this case).  As
+                        # such, destroy any v1 catalog data that might exist
+                        # and proceed.
+                        shutil.rmtree(writ_cat_root, True)
+                        writ_cat_root = None
+                        if os.path.exists(v0_attrs) and not self.catalog.exists:
+                                # A v0 catalog exists, but no v1 catalog exists;
+                                # this can happen when a repository that was
+                                # previously run with writable-root and
+                                # read_only is now being run with only
+                                # writable_root.
+                                need_transform = True
+                elif writ_cat_root and v0_lm and self.read_only:
+                        # The catalog lives in the writable_root if a v0 catalog
+                        # exists, writ_cat_root is set, and readonly is True.
+                        self.__set_catalog_root(writ_cat_root)
+
+                if self.mirror:
+                        need_transform = False
+
+                if need_transform and self.read_only and not self.writable_root:
+                        # Catalog data can't be transformed.
+                        need_transform = False
+
+                if need_transform:
+                        # v1 catalog should be destroyed if it exists already.
+                        self.catalog.destroy()
+
+                        # Create the transformed catalog.
+                        self.__log(_("Transforming repository catalog; this "
+                            "process will take some time."))
+                        self.__rebuild(lm=v0_lm)
+
+                        if not self.read_only and self.root:
+                                v0_cat = os.path.join(self.root,
+                                    "catalog", "catalog")
+                                for f in v0_attrs, v0_cat:
+                                        if os.path.exists(f):
+                                                portable.remove(f)
+
+                                # If this fails, it doesn't really matter, but
+                                # it should be removed if possible.
+                                shutil.rmtree(os.path.join(self.root,
+                                    "updatelog"), True)
+
+                # Determine effective catalog version after all transformation
+                # work is complete.
+                if os.path.exists(v0_attrs):
+                        # The only place a v1 catalog should exist, at all, is
+                        # either in catalog_root or in a subdirectory of
+                        # writable_root if a v0 catalog exists.
+                        v1_cat = None
+                        # If a writable root was specified, but the repository
+                        # is not in read-only mode, then the catalog must not be
+                        # stored using the writable root (this is consistent
+                        # with the storage of package data in this case).
+                        if self.writable_root and self.read_only:
+                                writ_cat_root = os.path.join(
+                                    self.writable_root, "catalog")
+                                v1_cat = catalog.Catalog(
+                                    meta_root=writ_cat_root, read_only=True)
+
+                        if v1_cat and v1_cat.exists:
+                                self.catalog_version = 1
+                                self.__set_catalog_root(v1_cat.meta_root)
+                        else:
+                                self.catalog_version = 0
+                elif self.catalog.exists:
+                        self.catalog_version = 1
+
+                if self.catalog_version >= 1 and not self.publisher:
+                        # If there's no information available to determine
+                        # the publisher identity, then assume it's the first
+                        # publisher in this repository store's catalog.
+                        # (This is reasonably safe since there should only
+                        # ever be one.)
+                        pubs = list(p for p in self.catalog.publishers())
+                        if pubs:
+                                self.publisher = pubs[0]
 
         def __save_catalog(self, lm=None):
                 """Private helper function that attempts to save the catalog in
@@ -886,7 +910,7 @@
 
                 # Ensure new catalog is created in a temporary location so that
                 # it can be renamed into place *after* creation to prevent
-                # unexpected failure from causing future upgrades to fail.
+                # unexpected failure causing future updates to fail.
                 old_cat_root = self.catalog_root
                 tmp_cat_root = self.__mkdtemp()
 
@@ -919,17 +943,17 @@
                                                             entry[-1]
                                                 else:
                                                         msg += "%s\n" % entry
-                                raise api_errors.UnknownErrors(msg)
+                                raise apx.UnknownErrors(msg)
                         elif e.errno == errno.EACCES or e.errno == errno.EPERM:
-                                raise api_errors.PermissionsException(
+                                raise apx.PermissionsException(
                                     e.filename)
                         elif e.errno == errno.EROFS:
-                                raise api_errors.ReadOnlyFileSystemException(
+                                raise apx.ReadOnlyFileSystemException(
                                     e.filename)
                         raise
 
                 # Save the new catalog data in the temporary location.
-                self.catalog_root = tmp_cat_root
+                self.__set_catalog_root(tmp_cat_root)
                 if lm:
                         self.catalog.last_modified = lm
                 self.catalog.save()
@@ -945,10 +969,13 @@
                 # Finally, rename the new catalog data into place, reset the
                 # catalog's location, and remove the old catalog data.
                 shutil.move(tmp_cat_root, old_cat_root)
-                self.catalog_root = old_cat_root
+                self.__set_catalog_root(old_cat_root)
                 if orig_cat_root:
                         shutil.rmtree(orig_cat_root)
 
+                # Set catalog version.
+                self.catalog_version = self.catalog.version
+
         def __set_catalog_root(self, root):
                 self.__catalog_root = root
                 if self.__catalog:
@@ -956,55 +983,54 @@
                         # its meta_root.
                         self.catalog.meta_root = root
 
-        def __set_repo_root(self, root):
+        def __set_root(self, root):
                 if root:
                         root = os.path.abspath(root)
-                        self.__repo_root = root
+                        self.__root = root
                         self.__tmp_root = os.path.join(root, "tmp")
-                        self.catalog_root = os.path.join(root, "catalog")
-                        self.feed_cache_root = root
+                        self.__set_catalog_root(os.path.join(root, "catalog"))
                         self.index_root = os.path.join(root, "index")
                         self.manifest_root = os.path.join(root, "pkg")
                         self.trans_root = os.path.join(root, "trans")
                         if not self.file_root:
-                                self.file_root = os.path.join(root, "file")
+                                self.__set_file_root(os.path.join(root, "file"))
                 else:
-                        self.__repo_root = None
-                        self.catalog_root = None
-                        self.feed_cache_root = None
+                        self.__root = None
+                        self.__set_catalog_root(None)
                         self.index_root = None
                         self.manifest_root = None
                         self.trans_root = None
- 
-        def __get_file_root(self):
-                return self.__file_root
 
         def __set_file_root(self, root):
                 self.__file_root = root
+                if not root:
+                        self.cache_store = None
+                        return
+
                 try:
                         self.cache_store = file_manager.FileManager(root,
                             self.read_only)
                 except file_manager.NeedToModifyReadOnlyFileManager:
-                        if self.repo_root:
+                        if self.root:
                                 try:
-                                        fs = os.lstat(self.repo_root)
+                                        os.lstat(self.root)
                                 except OSError, e:
                                         # If the stat failed due to this, then
                                         # assume the repository is possibly
                                         # valid but that there is a permissions
                                         # issue.
                                         if e.errno == errno.EACCES:
-                                                raise api_errors.\
+                                                raise apx.\
                                                     PermissionsException(
                                                     e.filename)
                                         if e.errno == errno.ENOENT:
                                                 raise RepositoryInvalidError(
-                                                    self.repo_root)
+                                                    self.root)
                                         raise
                                 # If the stat succeeded, then regardless of
-                                # whether repo_root is really a directory, the
+                                # whether root is really a directory, the
                                 # repository is invalid.
-                                raise RepositoryInvalidError(self.repo_root)
+                                raise RepositoryInvalidError(self.root)
                         # If repository root hasn't been specified yet,
                         # just raise the error with the path that is available.
                         raise RepositoryInvalidError(root)
@@ -1013,26 +1039,32 @@
                 if root:
                         root = os.path.abspath(root)
                         self.__tmp_root = os.path.join(root, "tmp")
-                        self.feed_cache_root = root
                         self.index_root = os.path.join(root, "index")
-                elif self.repo_root:
-                        self.__tmp_root = os.path.join(self.repo_root, "tmp")
-                        self.feed_cache_root = self.repo_root
-                        self.index_root = os.path.join(self.repo_root, "index")
+                elif self.root:
+                        self.__tmp_root = os.path.join(self.root, "tmp")
+                        self.index_root = os.path.join(self.root,
+                            "index")
                 else:
                         self.__tmp_root = None
-                        self.feed_cache_root = None
                         self.index_root = None
                 self.__writable_root = root
 
-        def __unlock_repository(self):
+        def __unlock_rstore(self):
                 """Unlocks the repository so other consumers may modify it."""
 
-                # XXX need filesystem unlock too?
-                self.__lock.release()
+                try:
+                        if self.__lockf:
+                                # To avoid race conditions with the next caller
+                                # waiting for the lock file, it is simply
+                                # truncated instead of removed.
+                                self.__lockf.truncate(0)
+                                self.__lockf.close()
+                                self.__lockf = None
+                finally:
+                        self.__lock.release()
 
         def __update_searchdb_unlocked(self, fmris):
-                """ Creates an indexer then hands it fmris It assumes that all
+                """Creates an indexer then hands it fmris; it assumes that all
                 needed locking has already occurred.
                 """
                 assert self.index_root
@@ -1043,38 +1075,9 @@
                             log=self.__index_log,
                             sort_file_max_size=self.__sort_file_max_size)
                         index_inst.server_update_index(fmris)
-
-        @staticmethod
-        def __whence(cmd):
-                if cmd[0] != '/':
-                        tmp_cmd = cmd
-                        cmd = None
-                        path = os.environ['PATH'].split(':')
-                        path.append(os.environ['PWD'])
-                        for p in path:
-                                if os.path.exists(os.path.join(p, tmp_cmd)):
-                                        cmd = os.path.join(p, tmp_cmd)
-                                        break
-                        assert cmd
-                return cmd
-
-        def __write_config(self):
-                """Save the repository's current configuration data."""
-
-                # No changes should be written to disk in readonly mode.
-                if self.read_only:
-                        return
-
-                # Save a new configuration (or refresh existing).
-                try:
-                        self.cfg.write()
-                except EnvironmentError, e:
-                        # If we're unable to write due to the following
-                        # errors, it isn't critical to the operation of
-                        # the repository.
-                        if e.errno not in (errno.EPERM, errno.EACCES,
-                            errno.EROFS):
-                                raise
+                        if not self.__search_available:
+                                self.__index_log("Search Available")
+                        self.__search_available = True
 
         def abandon(self, trans_id):
                 """Aborts a transaction with the specified Transaction ID.
@@ -1084,25 +1087,16 @@
                         raise RepositoryMirrorError()
                 if self.read_only:
                         raise RepositoryReadOnlyError()
-                if not self.repo_root:
+                if not self.trans_root:
                         raise RepositoryUnsupportedOperationError()
 
-                self.__lock_repository()
+                t = self.__get_transaction(trans_id)
                 try:
-                        try:
-                                t = self.__in_flight_trans[trans_id]
-                        except KeyError:
-                                raise RepositoryInvalidTransactionIDError(
-                                    trans_id)
-
-                        try:
-                                pstate = t.abandon()
-                                del self.__in_flight_trans[trans_id]
-                                return pstate
-                        except trans.TransactionError, e:
-                                raise RepositoryError(e)
-                finally:
-                        self.__unlock_repository()
+                        pstate = t.abandon()
+                        self.__discard_transaction(trans_id)
+                        return pstate
+                except trans.TransactionError, e:
+                        raise RepositoryError(e)
 
         def add(self, trans_id, action):
                 """Adds an action and its content to a transaction with the
@@ -1112,49 +1106,60 @@
                         raise RepositoryMirrorError()
                 if self.read_only:
                         raise RepositoryReadOnlyError()
-                if not self.repo_root:
+                if not self.trans_root:
                         raise RepositoryUnsupportedOperationError()
 
-                self.__lock_repository()
+                t = self.__get_transaction(trans_id)
                 try:
-                        try:
-                                t = self.__in_flight_trans[trans_id]
-                        except KeyError:
-                                raise RepositoryInvalidTransactionIDError(
-                                    trans_id)
-
-                        try:
-                                t.add_content(action)
-                        except trans.TransactionError, e:
-                                raise RepositoryError(e)
+                        t.add_content(action)
+                except trans.TransactionError, e:
+                        raise RepositoryError(e)
+
+        def add_content(self, refresh_index=False):
+                """Looks for packages added to the repository that are not in
+                the catalog and adds them.
+
+                'refresh_index' is an optional boolean value indicating whether
+                search indexes should be updated.
+                """
+                if self.mirror:
+                        raise RepositoryMirrorError()
+                if not self.catalog_root or self.catalog_version < 1:
+                        raise RepositoryUnsupportedOperationError()
+
+                self.__lock_rstore()
+                try:
+                        self.__rebuild(build_catalog=True,
+                            build_index=refresh_index, incremental=True)
                 finally:
-                        self.__unlock_repository()
+                        self.__unlock_rstore()
 
         def add_file(self, trans_id, data, size=None):
-                """Adds a certificate to a transaction with the specified
-                Transaction ID."""
+                """Adds a file to an in-flight transaction.
+
+                'trans_id' is the identifier of a transaction that
+                the file should be added to.
+
+                'data' is the string object containing the payload of the
+                file to add.
+
+                'size' is an optional integer value indicating the size of
+                the provided payload.
+                """
 
                 if self.mirror:
                         raise RepositoryMirrorError()
                 if self.read_only:
                         raise RepositoryReadOnlyError()
-                if not self.repo_root:
+                if not self.trans_root:
                         raise RepositoryUnsupportedOperationError()
 
-                self.__lock_repository()
+                t = self.__get_transaction(trans_id)
                 try:
-                        try:
-                                t = self.__in_flight_trans[trans_id]
-                        except KeyError:
-                                raise RepositoryInvalidTransactionIDError(
-                                    trans_id)
-
-                        try:
-                                t.add_file(data, size)
-                        except trans.TransactionError, e:
-                                raise RepositoryError(e)
-                finally:
-                        self.__unlock_repository()
+                        t.add_file(data, size)
+                except trans.TransactionError, e:
+                        raise RepositoryError(e)
+                return
 
         def add_package(self, pfmri):
                 """Adds the specified FMRI to the repository's catalog."""
@@ -1163,15 +1168,15 @@
                         raise RepositoryMirrorError()
                 if self.read_only:
                         raise RepositoryReadOnlyError()
-                if not self.repo_root:
+                if not self.catalog_root or self.catalog_version < 1:
                         raise RepositoryUnsupportedOperationError()
 
-                self.__lock_repository()
+                self.__lock_rstore()
                 try:
                         self.__add_package(pfmri)
                         self.__save_catalog()
                 finally:
-                        self.__unlock_repository()
+                        self.__unlock_rstore()
 
         def replace_package(self, pfmri):
                 """Replaces the information for the specified FMRI in the
@@ -1181,15 +1186,15 @@
                         raise RepositoryMirrorError()
                 if self.read_only:
                         raise RepositoryReadOnlyError()
-                if not self.repo_root:
+                if not self.catalog_root or self.catalog_version < 1:
                         raise RepositoryUnsupportedOperationError()
 
-                self.__lock_repository()
+                self.__lock_rstore()
                 try:
                         self.__replace_package(pfmri)
                         self.__save_catalog()
                 finally:
-                        self.__unlock_repository()
+                        self.__unlock_rstore()
 
         @property
         def catalog(self):
@@ -1201,6 +1206,12 @@
 
                 if self.mirror:
                         raise RepositoryMirrorError()
+                if not self.catalog_root:
+                        # Object not available.
+                        raise RepositoryUnsupportedOperationError()
+                if self.catalog_version == 0:
+                        return old_catalog.ServerCatalog(self.catalog_root,
+                            read_only=True, publisher=self.publisher)
 
                 self.__catalog = catalog.Catalog(meta_root=self.catalog_root,
                     log_updates=True, read_only=self.read_only)
@@ -1212,11 +1223,26 @@
                 as the v0 updatelog does not support renames, obsoletion,
                 package removal, etc."""
 
-                if not self.catalog_root:
+                if not self.catalog_root or self.catalog_version < 0:
                         raise RepositoryUnsupportedOperationError()
 
+                if self.catalog_version == 0:
+                        # If catalog is v0, it must be read and returned
+                        # directly to the caller.
+                        if not self.publisher:
+                                raise RepositoryUnsupportedOperationError()
+                        c = old_catalog.ServerCatalog(self.catalog_root,
+                            read_only=True, publisher=self.publisher)
+                        output = cStringIO.StringIO()
+                        c.send(output)
+                        output.seek(0)
+                        for l in output:
+                                yield l
+                        return
+
+                # For all other cases where the catalog object is available,
+                # fake a v0 catalog for the caller's sake.
                 c = self.catalog
-                self.inc_catalog()
 
                 # Yield each catalog attr in the v0 format:
                 # S Last-Modified: 2009-08-28T15:01:48.546606
@@ -1236,15 +1262,22 @@
 
                 if self.mirror:
                         raise RepositoryMirrorError()
-                if not self.catalog_root:
+                if not self.catalog_root or self.catalog_version < 1:
                         raise RepositoryUnsupportedOperationError()
 
                 assert name
-                self.inc_catalog()
-
                 return os.path.normpath(os.path.join(self.catalog_root, name))
 
-        def close(self, trans_id, refresh_index=True, add_to_catalog=True):
+        def reset_search(self):
+                """Discards currenty loaded search data so that it will be
+                reloaded the next a search is performed.
+                """
+                if not self.index_root:
+                        # Nothing to do.
+                        return
+                sqp.TermQuery.clear_cache(self.index_root)
+
+        def close(self, trans_id, add_to_catalog=True):
                 """Closes the transaction specified by 'trans_id'.
 
                 Returns a tuple containing the package FMRI and the current
@@ -1252,27 +1285,28 @@
 
                 if self.mirror:
                         raise RepositoryMirrorError()
-                if not self.repo_root:
+                if not self.trans_root:
                         raise RepositoryUnsupportedOperationError()
 
+                # The repository store should not be locked at this point
+                # as the transaction will trigger that indirectly through
+                # add_package().
+                t = self.__get_transaction(trans_id)
                 try:
-                        t = self.__in_flight_trans[trans_id]
-                except KeyError:
-                        raise RepositoryInvalidTransactionIDError(trans_id)
-
-                try:
-                        pfmri, pstate = t.close(refresh_index=refresh_index,
-                        add_to_catalog=add_to_catalog)
-                        del self.__in_flight_trans[trans_id]
+                        pfmri, pstate = t.close(
+                            add_to_catalog=add_to_catalog)
+                        self.__discard_transaction(trans_id)
                         return pfmri, pstate
-                except (api_errors.CatalogError, trans.TransactionError), e:
+                except (apx.CatalogError,
+                    trans.TransactionError), e:
                         raise RepositoryError(e)
 
         def file(self, fhash):
                 """Returns the absolute pathname of the file specified by the
                 provided SHA1-hash name."""
 
-                self.inc_file()
+                if not self.file_root:
+                        raise RepositoryUnsupportedOperationError()
 
                 if fhash is None:
                         raise RepositoryFileNotFoundError(fhash)
@@ -1282,28 +1316,51 @@
                         return fp
                 raise RepositoryFileNotFoundError(fhash)
 
+        def get_publisher(self):
+                """Return the Publisher object for this storage object or None
+                if not available.
+                """
+
+                if not self.publisher:
+                        raise RepositoryUnsupportedOperationError()
+
+                if self.root:
+                        # Determine if configuration for publisher exists
+                        # on-disk already and then return that if it does.
+                        p5ipath = os.path.join(self.root, "pub.p5i")
+                        if os.path.exists(p5ipath):
+                                pubs = p5i.parse(location=p5ipath)
+                                if pubs:
+                                        # Only expecting one, so only return
+                                        # the first.
+                                        return pubs[0][0]
+
+                # No p5i exists, or existing one doesn't contain publisher info,
+                # so return a stub publisher object.
+                return publisher.Publisher(self.publisher)
+
+        def has_transaction(self, trans_id):
+                """Returns a boolean value indicating whether the given
+                in-flight Transaction ID exists.
+                """
+
+                try:
+                        self.__get_transaction(trans_id)
+                        return True
+                except RepositoryInvalidTransactionIDError:
+                        return False
+
         @property
         def in_flight_transactions(self):
                 """The number of transactions awaiting completion."""
                 return len(self.__in_flight_trans)
 
-        def inc_catalog(self):
-                self.catalog_requests += 1
-
-        def inc_manifest(self):
-                self.manifest_requests += 1
-
-        def inc_file(self):
-                self.file_requests += 1
-
-        def inc_flist(self):
-                self.flist_requests += 1
-
-        def inc_flist_files(self):
-                self.flist_files += 1
-
-        def inc_renamed(self):
-                self.pkgs_renamed += 1
+        @property
+        def locked(self):
+                """A boolean value indicating whether the repository is locked.
+                """
+
+                return self.__lock and self.__lock.locked
 
         def manifest(self, pfmri):
                 """Returns the absolute pathname of the manifest file for the
@@ -1311,19 +1368,9 @@
 
                 if self.mirror:
                         raise RepositoryMirrorError()
-                if not self.repo_root:
+                if not self.manifest_root:
                         raise RepositoryUnsupportedOperationError()
-
-                self.inc_manifest()
-
-                try:
-                        if not isinstance(pfmri, fmri.PkgFmri):
-                                pfmri = fmri.PkgFmri(pfmri)
-                        fpath = pfmri.get_dir_path()
-                except fmri.FmriError, e:
-                        raise RepositoryInvalidFMRIError(e)
-
-                return os.path.join(self.manifest_root, fpath)
+                return os.path.join(self.manifest_root, pfmri.get_dir_path())
 
         def open(self, client_release, pfmri):
                 """Starts a transaction for the specified client release and
@@ -1333,20 +1380,16 @@
                         raise RepositoryMirrorError()
                 if self.read_only:
                         raise RepositoryReadOnlyError()
-                if not self.repo_root:
+                if not self.trans_root:
                         raise RepositoryUnsupportedOperationError()
 
-                self.__lock_repository()
                 try:
-                        try:
-                                t = trans.Transaction()
-                                t.open(self, client_release, pfmri)
-                                self.__in_flight_trans[t.get_basename()] = t
-                                return t.get_basename()
-                        except trans.TransactionError, e:
-                                raise RepositoryError(e)
-                finally:
-                        self.__unlock_repository()
+                        t = trans.Transaction()
+                        t.open(self, client_release, pfmri)
+                        self.__in_flight_trans[t.get_basename()] = t
+                        return t.get_basename()
+                except trans.TransactionError, e:
+                        raise RepositoryError(e)
 
         def append(self, client_release, pfmri):
                 """Starts an append transaction for the specified client
@@ -1357,92 +1400,86 @@
                         raise RepositoryMirrorError()
                 if self.read_only:
                         raise RepositoryReadOnlyError()
-                if not self.repo_root:
+                if not self.trans_root:
                         raise RepositoryUnsupportedOperationError()
 
-                self.__lock_repository()
                 try:
-                        try:
-                                t = trans.Transaction()
-                                t.append(self, client_release, pfmri)
-                                self.__in_flight_trans[t.get_basename()] = t
-                                return t.get_basename()
-                        except trans.TransactionError, e:
-                                raise RepositoryError(e)
-                finally:
-                        self.__unlock_repository()
+                        t = trans.Transaction()
+                        t.append(self, client_release, pfmri)
+                        self.__in_flight_trans[t.get_basename()] = t
+                        return t.get_basename()
+                except trans.TransactionError, e:
+                        raise RepositoryError(e)
 
         def refresh_index(self):
-                """ This function refreshes the search indexes if there any new
-                packages.  It starts a subprocess which results in a call to
-                run_update_index (see below) which does the actual update."""
+                """This function refreshes the search indexes if there any new
+                packages.
+                """
 
                 if self.mirror:
                         raise RepositoryMirrorError()
-                if not self.repo_root:
+                if not self.index_root:
                         raise RepositoryUnsupportedOperationError()
 
-                self.__lock_repository()
+                self.__lock_rstore(process=False)
                 try:
-                        self.__refresh_index()
+                        try:
+                                try:
+                                        self.__refresh_index()
+                                except se.InconsistentIndexException, e:
+                                        s = _("Index corrupted or out of date. "
+                                            "Removing old index directory (%s) "
+                                            " and rebuilding search "
+                                            "indexes.") % e.cause
+                                        self.__log(s, "INDEX")
+                                        try:
+                                                self.__rebuild(
+                                                    build_catalog=False,
+                                                    build_index=True)
+                                        except se.IndexingException, e:
+                                                self.__log(str(e), "INDEX")
+                                except se.IndexingException, e:
+                                        self.__log(str(e), "INDEX")
+                        except EnvironmentError, e:
+                                if e.errno in (errno.EACCES, errno.EROFS):
+                                        if self.writable_root:
+                                                raise RepositoryError(
+                                                    _("writable root not "
+                                                    "writable by current user "
+                                                    "id or group."))
+                                        raise RepositoryError(_("unable to "
+                                            "write to index directory."))
+                                raise
                 finally:
-                        self.__unlock_repository()
-
-        def add_content(self, refresh_index=True):
-                """Looks for packages added to the repository that are not in
-                the catalog, adds them, and then updates search data by default.
-                """
-                if self.mirror:
-                        raise RepositoryMirrorError()
-                if not self.repo_root:
-                        raise RepositoryUnsupportedOperationError()
-
-                self.__lock_repository()
-                try:
-                        self.__check_search()
-                        self.__rebuild(incremental=True)
-                        if refresh_index:
-                                self.__refresh_index(synchronous=True)
-                finally:
-                        self.__unlock_repository()
-
-        def rebuild(self, build_index=True):
+                        self.__unlock_rstore()
+
+        def rebuild(self, build_catalog=True, build_index=False):
                 """Rebuilds the repository catalog and search indexes using the
                 package manifests currently in the repository.
 
+                'build_catalog' is an optional boolean value indicating whether
+                package catalogs should be rebuilt.  If True, existing search
+                data will be discarded.
+
                 'build_index' is an optional boolean value indicating whether
-                search indexes should be built.  Regardless of this value, any
-                existing search data will be discarded.
+                search indexes should be built.
                 """
 
                 if self.mirror:
                         raise RepositoryMirrorError()
                 if self.read_only:
                         raise RepositoryReadOnlyError()
-                if not self.repo_root:
+                if not self.catalog_root or self.catalog_version < 1:
                         raise RepositoryUnsupportedOperationError()
 
-                self.__lock_repository()
+                self.__lock_rstore()
                 try:
-                        self.__destroy_catalog()
-                        self.__init_dirs(create=True)
-                        self.__check_search()
-                        self.__rebuild()
-                        if build_index:
-                                self.__refresh_index()
+                        self.__rebuild(build_catalog=build_catalog,
+                            build_index=build_index)
                 finally:
-                        self.__unlock_repository()
-
-        def reload(self, cfgpathname=None, properties=EmptyDict):
-                """Reloads the repository state information from disk."""
-
-                self.__lock_repository()
-                self.__init_config(cfgpathname=cfgpathname,
-                    properties=properties)
-                self.__init_state()
-                self.__unlock_repository()
-
-        def run_update_index(self):
+                        self.__unlock_rstore()
+
+        def __run_update_index(self):
                 """ Determines which fmris need to be indexed and passes them
                 to the indexer.
 
@@ -1453,7 +1490,7 @@
 
                 if self.mirror:
                         raise RepositoryMirrorError()
-                if not self.index_root:
+                if not self.index_root or self.catalog_version < 1:
                         raise RepositoryUnsupportedOperationError()
 
                 c = self.catalog
@@ -1469,6 +1506,9 @@
                             log=self.__index_log,
                             sort_file_max_size=self.__sort_file_max_size)
                         ind.setup()
+                        if not self.__search_available:
+                                self.__index_log("Search Available")
+                        self.__search_available = True
 
         def search(self, queries):
                 """Searches the index for each query in the list of queries.
@@ -1477,23 +1517,28 @@
 
                 if self.mirror:
                         raise RepositoryMirrorError()
-                if not self.index_root:
+                if not self.index_root or not self.catalog_root:
                         raise RepositoryUnsupportedOperationError()
 
+                self.__check_search()
+                if not self.search_available:
+                        raise RepositorySearchUnavailableError()
+
                 def _search(q):
                         assert self.index_root
                         l = sqp.QueryLexer()
                         l.build()
-                        qp = sqp.QueryParser(l)
-                        query = qp.parse(q.text)
+                        qqp = sqp.QueryParser(l)
+                        query = qqp.parse(q.text)
                         query.set_info(num_to_return=q.num_to_return,
                             start_point=q.start_point,
                             index_dir=self.index_root,
                             get_manifest_path=self.manifest,
                             case_sensitive=q.case_sensitive)
-                        return query.search(c.fmris)
-
-                c = self.catalog
+                        if q.return_type == sqp.Query.RETURN_PACKAGES:
+                                query.propagate_pkg_return()
+                        return query.search(self.catalog.fmris)
+
                 query_lst = []
                 try:
                         for s in queries:
@@ -1508,7 +1553,58 @@
 
         @property
         def search_available(self):
-                return self.__search_available or self.__check_search()
+                return (self.__search_available and self.index_root and
+                    os.path.exists(self.index_root)) or self.__check_search()
+
+        def update_publisher(self, pub):
+                """Updates the configuration information for the publisher
+                defined by the provided Publisher object.
+                """
+
+                if self.mirror:
+                        raise RepositoryMirrorError()
+                if self.read_only:
+                        raise RepositoryReadOnlyError()
+                if not self.root:
+                        raise RepositoryUnsupportedOperationError()
+
+                p5ipath = os.path.join(self.root, "pub.p5i")
+                fn = None
+                try:
+                        dirname = os.path.dirname(p5ipath)
+                        fd, fn = tempfile.mkstemp(dir=dirname)
+
+                        st = None
+                        try:
+                                st = os.stat(p5ipath)
+                        except OSError, e:
+                                if e.errno != errno.ENOENT:
+                                        raise
+
+                        if st:
+                                os.fchmod(fd, stat.S_IMODE(st.st_mode))
+                                try:
+                                        portable.chown(fn, st.st_uid, st.st_gid)
+                                except OSError, e:
+                                        if e.errno != errno.EPERM:
+                                                raise
+                        else:
+                                os.fchmod(fd, misc.PKG_FILE_MODE)
+
+                        with os.fdopen(fd, "wb") as f:
+                                with codecs.EncodedFile(f, "utf-8") as ef:
+                                        p5i.write(ef, [pub])
+                        portable.rename(fn, p5ipath)
+                except EnvironmentError, e:
+                        if e.errno == errno.EACCES:
+                                raise apx.PermissionsException(e.filename)
+                        elif e.errno == errno.EROFS:
+                                raise apx.ReadOnlyFileSystemException(
+                                    e.filename)
+                        raise
+                finally:
+                        if fn and os.path.exists(fn):
+                                os.unlink(fn)
 
         def valid_new_fmri(self, pfmri):
                 """Check that the FMRI supplied as an argument would be valid
@@ -1518,7 +1614,7 @@
 
                 if self.mirror:
                         raise RepositoryMirrorError()
-                if not self.repo_root:
+                if not self.catalog_root:
                         raise RepositoryUnsupportedOperationError()
                 if not fmri.is_valid_pkg_name(pfmri.get_name()):
                         return False
@@ -1532,7 +1628,7 @@
         def valid_append_fmri(self, pfmri):
                 if self.mirror:
                         raise RepositoryMirrorError()
-                if not self.repo_root:
+                if not self.catalog_root:
                         raise RepositoryUnsupportedOperationError()
                 if not fmri.is_valid_pkg_name(pfmri.get_name()):
                         return False
@@ -1543,7 +1639,947 @@
 
                 c = self.catalog
                 entry = c.get_entry(pfmri)
-                return entry
+                return entry is not None
+
+        catalog_root = property(lambda self: self.__catalog_root)
+        file_root = property(lambda self: self.__file_root)
+        root = property(lambda self: self.__root)
+        writable_root = property(lambda self: self.__writable_root)
+
+
+class Repository(object):
+        """A Repository object is a representation of data contained within a
+        pkg(5) repository and an interface to manipulate it."""
+
+        def __init__(self, cfgpathname=None, file_root=None, log_obj=None,
+            mirror=False, properties=misc.EmptyDict, read_only=False,
+            root=None, sort_file_max_size=indexer.SORT_FILE_MAX_SIZE,
+            writable_root=None):
+                """Prepare the repository for use."""
+
+                # This lock is used to protect the repository from multiple
+                # threads modifying it at the same time.  This must be set
+                # first.
+                self.__lock = pkg.nrlock.NRLock()
+                self.__prop_lock = pkg.nrlock.NRLock()
+
+                # Setup any root overrides or root defaults first.
+                self.__file_root = file_root
+                self.__pub_root = None
+                self.__root = None
+                self.__tmp_root = None
+                self.__writable_root = None
+
+                # Set root after roots above.
+                self.__set_root(root)
+
+                # Set writable root last.
+                self.__set_writable_root(writable_root)
+
+                # Stats
+                self.__catalog_requests = 0
+                self.__file_requests = 0
+                self.__manifest_requests = 0
+
+                # Initialize.
+                self.__cfgpathname = cfgpathname
+                self.__cfg = None
+                self.__mirror = mirror
+                self.__read_only = read_only
+                self.__rstores = None
+                self.__sort_file_max_size = sort_file_max_size
+                self.log_obj = log_obj
+                self.version = -1
+
+                self.__lock_repository()
+                try:
+                        self.__init_state(properties=properties)
+                finally:
+                        self.__unlock_repository()
+
+        def __init_format(self, properties=misc.EmptyI):
+                """Private helper function to determine repository format and
+                validity.
+                """
+
+                cfgpathname = None
+                if self.__cfgpathname:
+                        # Use the custom configuration.
+                        cfgpathname = self.__cfgpathname
+                elif self.root:
+                        # Fallback to older standard configuration.
+                        cfgpathname = os.path.join(self.root,
+                            "cfg_cache")
+
+                if self.root:
+                        # Determine if the standard configuration file exists,
+                        # and if so, ignore any custom location specified as it
+                        # is only valid for older formats.
+                        cfgpath = os.path.join(self.root,
+                            "pkg5.repository")
+                        if (cfgpathname and not os.path.exists(cfgpathname)) or \
+                            os.path.isfile(cfgpath):
+                                cfgpathname = cfgpath
+
+                # Load the repository configuration.
+                self.__cfg = RepositoryConfig(target=cfgpathname,
+                    overrides=properties)
+
+                try:
+                        self.version = int(self.cfg.get_property("repository",
+                            "version"))
+                except (cfg.PropertyConfigError, ValueError):
+                        # If version couldn't be read from configuration,
+                        # then allow fallback path below to set things right.
+                        self.version = -1
+
+                if self.version <= 0 and self.root:
+                        # If version doesn't exist, attempt to determine version
+                        # based on structure.
+                        pub_root = os.path.join(self.root, "publisher")
+                        cat_root = os.path.join(self.root, "catalog")
+                        if os.path.exists(pub_root) or \
+                            (self.cfg.version > 3 and
+                            not os.path.exists(cat_root)):
+                                # If publisher root exists or new configuration
+                                # format exists (and the old catalog root
+                                # does not), assume this is a v4 repository.
+                                self.version = 4
+                        elif self.root:
+                                if os.path.exists(cat_root):
+                                        if os.path.exists(os.path.join(
+                                            cat_root, "attrs")):
+                                                # Old catalog implies v2.
+                                                self.version = 2
+                                        else:
+                                                # Assume version 3 otherwise.
+                                                self.version = 3
+
+                                        # Reload the repository configuration
+                                        # so that configuration definitions
+                                        # can match.
+                                        self.__cfg = RepositoryConfig(
+                                            target=cfgpathname,
+                                            overrides=properties,
+                                            version=self.version)
+                                else:
+                                        raise RepositoryInvalidError(
+                                            self.root)
+                        else:
+                                raise RepositoryInvalidError()
+
+                        self.cfg.set_property("repository", "version",
+                            self.version)
+                elif self.version <= 0 and self.file_root:
+                        # If only file root specified, treat as version 4
+                        # repository.
+                        self.version = 4
+
+                # Setup roots.
+                if self.root and not self.file_root:
+                        # Don't create the default file root at this point, but
+                        # set its default location if it exists.
+                        froot = os.path.join(self.root, "file")
+                        if not self.file_root and os.path.exists(froot):
+                                self.__file_root = froot
+
+                if self.version > CURRENT_REPO_VERSION:
+                        raise RepositoryVersionError(self.root,
+                            self.version)
+                if self.version == 4:
+                        if self.root and not self.pub_root:
+                                # Don't create the publisher root at this point,
+                                # but set its expected location.
+                                self.__pub_root = os.path.join(self.root,
+                                    "publisher")
+
+                # Setup repository stores.
+                def_pub = self.cfg.get_property("publisher", "prefix")
+                if self.version == 4:
+                        # For repository versions 4+, there is a repository
+                        # store for the top-level file root...
+                        froot = self.file_root
+                        if not froot:
+                                froot = os.path.join(self.root, "file")
+                        rstore = _RepoStore(file_root=froot,
+                            log_obj=self.log_obj, mirror=self.mirror,
+                            read_only=self.read_only)
+                        self.__rstores[rstore.publisher] = rstore
+
+                        # ...and then one for each publisher if any are known.
+                        if self.pub_root and os.path.exists(self.pub_root):
+                                for pub in os.listdir(self.pub_root):
+                                        self.__new_rstore(pub)
+
+                        # If a default publisher is set, ensure that a storage
+                        # object always exists for it.
+                        if def_pub and def_pub not in self.__rstores:
+                                self.__new_rstore(def_pub)
+                else:
+                        # For older repository versions, there is only one
+                        # repository store, and it might have an associated
+                        # publisher prefix.
+                        rstore = _RepoStore(file_root=self.file_root,
+                            log_obj=self.log_obj, pub=def_pub,
+                            mirror=self.mirror,
+                            read_only=self.read_only,
+                            root=self.root,
+                            writable_root=self.writable_root)
+                        self.__rstores[rstore.publisher] = rstore
+
+                if not self.root:
+                        # Nothing more to do.
+                        return
+
+                try:
+                        fs = os.lstat(self.root)
+                except OSError, e:
+                        # If the stat failed due to this, then assume the
+                        # repository is possibly valid but that there is a
+                        # permissions issue.
+                        if e.errno == errno.EACCES:
+                                raise apx.PermissionsException(
+                                    e.filename)
+                        elif e.errno == errno.ENOENT:
+                                raise RepositoryInvalidError(self.root)
+                        raise
+
+                if not stat.S_ISDIR(stat.S_IFMT(fs.st_mode)):
+                        # Not a directory.
+                        raise RepositoryInvalidError(self.root)
+
+                # Ensure obsolete search data is removed.
+                if self.version >= 3 and not self.read_only:
+                        searchdb_file = os.path.join(self.root, "search")
+                        for ext in ".pag", ".dir":
+                                try:
+                                        os.unlink(searchdb_file + ext)
+                                except OSError:
+                                        # If these can't be removed, it doesn't
+                                        # matter.
+                                        continue
+
+        def __init_state(self, properties=misc.EmptyDict):
+                """Private helper function to initialize state."""
+
+                # Discard current repository storage state data.
+                self.__rstores = {}
+
+                # Determine format, configuration location, and validity.
+                self.__init_format(properties=properties)
+
+                # Ensure default configuration is written.
+                self.__write_config()
+
+        def __lock_repository(self):
+                """Locks the repository preventing multiple consumers from
+                modifying it during operations."""
+
+                # XXX need filesystem lock too?
+                self.__lock.acquire()
+
+        def __log(self, msg, context="", severity=logging.INFO):
+                if self.log_obj:
+                        self.log_obj.log(msg=msg, context=context,
+                            severity=severity)
+
+        def __set_mirror(self, value):
+                self.__prop_lock.acquire()
+                try:
+                        self.__mirror = value
+                        for rstore in self.rstores:
+                                rstore.mirror = value
+                finally:
+                        self.__prop_lock.release()
+
+        def __set_read_only(self, value):
+                self.__prop_lock.acquire()
+                try:
+                        self.__read_only = value
+                        for rstore in self.rstores:
+                                rstore.read_only = value
+                finally:
+                        self.__prop_lock.release()
+
+        def __set_root(self, root):
+                self.__prop_lock.acquire()
+                try:
+                        if root:
+                                root = os.path.abspath(root)
+                                self.__root = root
+                                self.__tmp_root = os.path.join(root, "tmp")
+                        else:
+                                self.__root = None
+                finally:
+                        self.__prop_lock.release()
+
+        def __set_writable_root(self, root):
+                self.__prop_lock.acquire()
+                try:
+                        if root:
+                                root = os.path.abspath(root)
+                                self.__tmp_root = os.path.join(root, "tmp")
+                        elif self.root:
+                                self.__tmp_root = os.path.join(self.root,
+                                    "tmp")
+                        else:
+                                self.__tmp_root = None
+                        self.__writable_root = root
+                finally:
+                        self.__prop_lock.release()
+
+        def __unlock_repository(self):
+                """Unlocks the repository so other consumers may modify it."""
+
+                # XXX need filesystem unlock too?
+                self.__lock.release()
+
+        def __write_config(self):
+                """Save the repository's current configuration data."""
+
+                # No changes should be written to disk in readonly mode.
+                if self.read_only:
+                        return
+
+                # Save a new configuration (or refresh existing).
+                try:
+                        self.cfg.write()
+                except EnvironmentError, e:
+                        # If we're unable to write due to the following
+                        # errors, it isn't critical to the operation of
+                        # the repository.
+                        if e.errno not in (errno.EPERM, errno.EACCES,
+                            errno.EROFS):
+                                raise
+
+        def __new_rstore(self, pub):
+                assert pub
+                if pub in self.__rstores:
+                        raise RepositoryDuplicatePublisher(pub)
+
+                if self.pub_root:
+                        # Newer repository format stores repository data
+                        # partitioned by publisher.
+                        root = os.path.join(self.pub_root, pub)
+                else:
+                        # Older repository formats store repository data
+                        # in a shared root area.
+                        root = self.root
+
+                writ_root = None
+                if self.writable_root:
+                        writ_root = os.path.join(self.writable_root,
+                            "publisher", pub)
+
+                froot = self.file_root
+                if self.root and froot and \
+                    froot.startswith(self.root):
+                        # Ignore the file root if it's the default one.
+                        froot = None
+
+                rstore = _RepoStore(file_root=froot, log_obj=self.log_obj,
+                    mirror=self.mirror, pub=pub, read_only=self.read_only,
+                    root=root,
+                    sort_file_max_size=self.__sort_file_max_size,
+                    writable_root=writ_root)
+                self.__rstores[pub] = rstore
+                return rstore
+
+        def abandon(self, trans_id):
+                """Aborts a transaction with the specified Transaction ID.
+                Returns the current package state.
+                """
+
+                rstore = self.get_trans_rstore(trans_id)
+                return rstore.abandon(trans_id)
+
+        def add(self, trans_id, action):
+                """Adds an action and its content to a transaction with the
+                specified Transaction ID.
+                """
+
+                rstore = self.get_trans_rstore(trans_id)
+                return rstore.add(trans_id, action)
+
+        def add_publisher(self, pub):
+                """Creates a repository storage area for the publisher defined
+                by the provided Publisher object and then stores the publisher's
+                configuration information.  Only supported for version 4 and
+                later repositories.
+                """
+
+                if self.mirror:
+                        raise RepositoryMirrorError()
+                if self.read_only:
+                        raise RepositoryReadOnlyError()
+                if not self.pub_root or self.version < 4:
+                        raise RepositoryUnsupportedOperationError()
+
+                # Create the new repository storage area.
+                rstore = self.__new_rstore(pub.prefix)
+
+                # Update the publisher's configuration.
+                try:
+                        rstore.update_publisher(pub)
+                except:
+                        # If the above fails, be certain to delete the new
+                        # repository storage area and then re-raise the
+                        # original exception.
+                        exc_type, exc_value, exc_tb = sys.exc_info()
+                        try:
+                                shutil.rmtree(rstore.root)
+                        finally:
+                                # This ensures that the original exception and
+                                # traceback are used.
+                                raise exc_value, None, exc_tb
+
+        def add_package(self, pfmri):
+                """Adds the specified FMRI to the repository's catalog."""
+
+                rstore = self.get_pub_rstore(pfmri.publisher)
+                return rstore.add_package(pfmri)
+
+        def append(self, client_release, pfmri, pub=None):
+                """Starts an append transaction for the specified client
+                release and FMRI.  Returns the Transaction ID for the new
+                transaction."""
+
+                try:
+                        if not isinstance(pfmri, fmri.PkgFmri):
+                                pfmri = fmri.PkgFmri(pfmri, client_release)
+                except fmri.FmriError, e:
+                        raise RepositoryInvalidFMRIError(e)
+
+                if pub and not pfmri.publisher:
+                        pfmri.publisher = pub
+
+                try:
+                        rstore = self.get_pub_rstore(pfmri.publisher)
+                except RepositoryUnknownPublisher, e:
+                        if not pfmri.publisher:
+                                # No publisher given in FMRI and no default
+                                # publisher so treat as invalid FMRI.
+                                raise RepositoryUnqualifiedFMRIError(pfmri)
+                        raise
+                return rstore.append(client_release, pfmri)
+
+        def catalog_0(self, pub=None):
+                """Returns a generator object for the full version of
+                the catalog contents.  Incremental updates are not provided
+                as the v0 updatelog does not support renames, obsoletion,
+                package removal, etc.
+
+                'pub' is the prefix of the publisher to return catalog data for.
+                If not specified, the default publisher will be used.  If no
+                default publisher has been configured, an AssertionError will be
+                raised.
+                """
+
+                self.inc_catalog()
+                rstore = self.get_pub_rstore(pub)
+                return rstore.catalog_0()
+
+        def catalog_1(self, name, pub=None):
+                """Returns the absolute pathname of the named catalog file.
+
+                'pub' is the prefix of the publisher to return catalog data for.
+                If not specified, the default publisher will be used.  If no
+                default publisher has been configured, an AssertionError will be
+                raised.
+                """
+
+                self.inc_catalog()
+                rstore = self.get_pub_rstore(pub)
+                return rstore.catalog_1(name)
+
+        def close(self, trans_id, add_to_catalog=True):
+                """Closes the transaction specified by 'trans_id'.
+
+                Returns a tuple containing the package FMRI and the current
+                package state in the catalog.
+                """
+
+                self.inc_catalog()
+                rstore = self.get_trans_rstore(trans_id)
+                return rstore.close(trans_id, add_to_catalog=add_to_catalog)
+
+        def file(self, fhash, pub=None):
+                """Returns the absolute pathname of the file specified by the
+                provided SHA1-hash name.
+
+                'pub' is the prefix of the publisher to return catalog data for.
+                If not specified, the default publisher will be used.  If no
+                default publisher has been configured, an AssertionError will be
+                raised.
+                """
+
+                self.inc_file()
+                if pub:
+                        rstore = self.get_pub_rstore(pub)
+                        return rstore.file(fhash)
+
+                # If a publisher wasn't specified, every repository store will
+                # have to be tried since default publisher can't safely apply
+                # here.
+                for rstore in self.rstores:
+                        try:
+                                return rstore.file(fhash)
+                        except RepositoryFileNotFoundError:
+                                # Ignore and try next repository store.
+                                pass
+
+                # Not found in any repository store.
+                raise RepositoryFileNotFoundError(fhash)
+
+        def get_catalog(self, pub=None):
+                """Return the catalog object for the given publisher.
+
+                'pub' is the optional name of the publisher to return the
+                catalog for.  If not provided, the default publisher's
+                catalog will be returned.
+                """
+
+                try:
+                        rstore = self.get_pub_rstore(pub)
+                        return rstore.catalog
+                except RepositoryUnknownPublisher:
+                        if pub:
+                                # In this case, an unknown publisher's
+                                # catalog was requested.
+                                raise
+                        # No catalog to return.
+                        raise RepositoryUnsupportedOperationError()
+
+        def get_pub_rstore(self, pub=None):
+                """Return a repository storage object matching the given
+                publisher (if provided).  If not provided, a repository
+                storage object for the default publisher will be returned.
+                A RepositoryUnknownPublisher exception will be raised if
+                no storage object for the given publisher exists.
+                """
+
+                if pub is None:
+                        pub = self.cfg.get_property("publisher", "prefix")
+                if not pub:
+                        raise RepositoryUnknownPublisher(pub)
+
+                try:
+                        rstore = self.__rstores[pub]
+                except KeyError:
+                        raise RepositoryUnknownPublisher(pub)
+                return rstore
+
+        def __get_cfg_publisher(self, pub):
+                """Return a publisher object for the given publisher prefix
+                based on the repository's configuration information.
+                """
+                assert self.version < 4
+
+                alias = self.cfg.get_property("publisher", "alias")
+                icas = self.cfg.get_property("publisher",
+                    "intermediate_certs")
+                scas = self.cfg.get_property("publisher",
+                    "signing_ca_certs")
+
+                rargs = {}
+                for prop in ("collection_type", "description",
+                    "legal_uris", "mirrors", "name", "origins",
+                    "refresh_seconds", "registration_uri",
+                    "related_uris"):
+                        rargs[prop] = self.cfg.get_property(
+                            "repository", prop)
+
+                repo = publisher.Repository(**rargs)
+                return publisher.Publisher(pub, alias=alias,
+                    repositories=[repo], ca_certs=scas, intermediate_certs=icas)
+
+        def get_publishers(self):
+                """Return publisher objects for all publishers known by the
+                repository.
+                """
+                return [
+                    self.get_publisher(pub)
+                    for pub in self.publishers
+                ]
+
+        def get_publisher(self, pub):
+                """Return the publisher object for the given publisher.  Raises
+                RepositoryUnknownPublisher if no matching publisher can be
+                found.
+                """
+
+                if not pub:
+                        raise RepositoryUnknownPublisher(pub)
+                if self.version < 4:
+                        return self.__get_cfg_publisher(pub)
+
+                rstore = self.get_pub_rstore(pub)
+                if not rstore:
+                        raise RepositoryUnknownPublisher(pub)
+                return rstore.get_publisher()
+
+        def get_status(self):
+                """Return a dictionary of status information about the
+                repository.
+                """
+
+                if self.locked:
+                        rstatus = "processing"
+                else:
+                        rstatus = "online"
+
+                rdata = {
+                    "repository": {
+                        "configuration": self.cfg.get_index(),
+                        "publishers": {},
+                        "requests": {
+                            "catalog": self.catalog_requests,
+                            "file": self.file_requests,
+                            "manifests": self.manifest_requests,
+                        },
+                        "status": rstatus, # Overall repository state.
+                        "version": self.version, # Version of repository.
+                    },
+                    "version": 1, # Version of status structure.
+                }
+
+                for rstore in self.rstores:
+                        if not rstore.publisher:
+                                continue
+                        pubdata = rdata["repository"]["publishers"]
+                        pubdata[rstore.publisher] = rstore.get_status()
+                return rdata
+
+        def get_trans_rstore(self, trans_id):
+                """Return a repository storage object matching the given
+                Transaction ID.  If no repository storage object has a
+                matching Transaction ID, a RepositoryInvalidTransactionIDError
+                will be raised.
+                """
+
+                for rstore in self.rstores:
+                        if rstore.has_transaction(trans_id):
+                                return rstore
+                raise RepositoryInvalidTransactionIDError(trans_id)
+
+        @property
+        def in_flight_transactions(self):
+                """The number of transactions awaiting completion."""
+
+                return sum(
+                    rstore.in_flight_transactions
+                    for rstore in self.rstores
+                )
+
+        def inc_catalog(self):
+                self.__catalog_requests += 1
+
+        def inc_file(self):
+                self.__file_requests += 1
+
+        def inc_manifest(self):
+                self.__manifest_requests += 1
+
+        @property
+        def locked(self):
+                """A boolean value indicating whether the repository is locked.
+                """
+
+                return self.__lock and self.__lock.locked
+
+        def manifest(self, pfmri, pub=None):
+                """Returns the absolute pathname of the manifest file for the
+                specified FMRI.
+                """
+
+                self.inc_manifest()
+
+                try:
+                        if not isinstance(pfmri, fmri.PkgFmri):
+                                pfmri = fmri.PkgFmri(pfmri)
+                except fmri.FmriError, e:
+                        raise RepositoryInvalidFMRIError(e)
+
+                if not pub and pfmri.publisher:
+                        pub = pfmri.publisher
+                elif pub and not pfmri.publisher:
+                        pfmri.publisher = pub
+
+                if pub:
+                        try:
+                                rstore = self.get_pub_rstore(pub)
+                        except RepositoryUnknownPublisher, e:
+                                raise RepositoryManifestNotFoundError(pfmri)
+                        return rstore.manifest(pfmri)
+
+                # If a publisher wasn't specified, every repository store will
+                # have to be tried since default publisher can't safely apply
+                # here.  It's assumed that it's unlikely that two publishers
+                # share the exact same FMRI.  Since this case is only for
+                # compatibility, it shouldn't be much of a concern.
+                mpath = None
+                for rstore in self.rstores:
+                        if not rstore.publisher:
+                                continue
+                        mpath = rstore.manifest(pfmri)
+                        if not mpath or not os.path.exists(mpath):
+                                continue
+                        return mpath
+                raise RepositoryManifestNotFoundError(pfmri)
+
+        def open(self, client_release, pfmri, pub=None):
+                """Starts a transaction for the specified client release and
+                FMRI.  Returns the Transaction ID for the new transaction.
+                """
+
+                try:
+                        if not isinstance(pfmri, fmri.PkgFmri):
+                                pfmri = fmri.PkgFmri(pfmri, client_release)
+                except fmri.FmriError, e:
+                        raise RepositoryInvalidFMRIError(e)
+
+                if pub and not pfmri.publisher:
+                        pfmri.publisher = pub
+
+                try:
+                        rstore = self.get_pub_rstore(pfmri.publisher)
+                except RepositoryUnknownPublisher, e:
+                        if not pfmri.publisher:
+                                # No publisher given in FMRI and no default
+                                # publisher so treat as invalid FMRI.
+                                raise RepositoryUnqualifiedFMRIError(pfmri)
+                        # A publisher was provided, but no repository storage
+                        # object exists yet, so add one.
+                        rstore = self.__new_rstore(pfmri.publisher)
+                return rstore.open(client_release, pfmri)
+
+        @property
+        def publishers(self):
+                """A set containing the list of publishers known to the
+                repository."""
+
+                pubs = set()
+                pub = self.cfg.get_property("publisher", "prefix")
+                if pub:
+                        pubs.add(pub)
+
+                for rstore in self.rstores:
+                        if rstore.publisher:
+                                pubs.add(rstore.publisher)
+                return pubs
+
+        def refresh_index(self, pub=None):
+                """This function refreshes the search indexes if there are any
+                packages.
+                """
+
+                for rstore in self.rstores:
+                        if not rstore.publisher:
+                                continue
+                        if pub and rstore.publisher and rstore.publisher != pub:
+                                continue
+                        rstore.refresh_index()
+
+        def add_content(self, pub=None, refresh_index=False):
+                """Looks for packages added to the repository that are not in
+                the catalog, adds them, and then updates search data by default.
+                """
+
+                for rstore in self.rstores:
+                        if not rstore.publisher:
+                                continue
+                        if pub and rstore.publisher and rstore.publisher != pub:
+                                continue
+                        rstore.add_content(refresh_index=refresh_index)
+
+        def add_file(self, trans_id, data, size=None):
+                """Adds a file to a transaction with the specified Transaction
+                ID."""
+
+                rstore = self.get_trans_rstore(trans_id)
+                return rstore.add_file(trans_id, data=data, size=size)
+
+        def add_signing_certs(self, cert_paths, ca, pub=None):
+                """Add the certificates stored in the given paths to the
+                files in the repository and as properties of the publisher.
+                Whether the certificates are added as CA certificates or
+                intermediate certificates is determined by the 'ca' parameter.
+                """
+
+                rstore = self.get_pub_rstore(pub)
+
+                hshs = []
+                for p in cert_paths:
+                        try:
+                                # Get the hash of the cert file and then add it.
+                                hsh, s = misc.get_data_digest(p,
+                                    return_content=True)
+
+                                # The cert must be compressed first before
+                                # adding it to the repository.  The temporary
+                                # file created to do this is moved into place
+                                # by the insert.
+                                fd, pth = tempfile.mkstemp()
+                                gfh = PkgGzipFile(filename=pth, mode="wb")
+                                gfh.write(s)
+                                gfh.close()
+                                rstore.cache_store.insert(hsh, pth)
+                                hshs.append(hsh)
+                        except EnvironmentError, e:
+                                if e.errno == errno.EACCES:
+                                        raise apx.PermissionsException(
+                                            e.filename)
+                                if e.errno == errno.ENOENT:
+                                        raise RepositoryNoSuchFileError(
+                                            e.filename)
+                                raise
+
+                prop_name = "intermediate_certs"
+                if ca:
+                        prop_name = "signing_ca_certs"
+
+                if self.version < 4:
+                        # For older repositories, the information is stored
+                        # in the repository configuration.
+                        t = set(self.cfg.get_property("publisher", prop_name))
+                        t.update(hshs)
+                        self.cfg.set_property("publisher", prop_name, sorted(t))
+                        self.write_config()
+                        return
+
+                # For newer repositories, the certs are stored as part of the
+                # publisher's metadata.
+                pub_obj = rstore.get_publisher()
+                t = set(getattr(pub_obj, prop_name))
+                t.update(hshs)
+                setattr(pub_obj, prop_name, sorted(t))
+                rstore.update_publisher(pub_obj)
+
+        def rebuild(self, build_catalog=True, build_index=False, pub=None):
+                """Rebuilds the repository catalog and search indexes using the
+                package manifests currently in the repository.
+
+                'build_catalog' is an optional boolean value indicating whether
+                package catalogs should be rebuilt.  If True, existing search
+                data will be discarded.
+
+                'build_index' is an optional boolean value indicating whether
+                search indexes should be built.
+                """
+
+                for rstore in self.rstores:
+                        if not rstore.publisher:
+                                continue
+                        if pub and rstore.publisher and rstore.publisher != pub:
+                                continue
+                        rstore.rebuild(build_catalog=build_catalog,
+                            build_index=build_index)
+
+        def reload(self):
+                """Reloads the repository state information."""
+
+                self.__lock_repository()
+                self.__init_state()
+                self.__unlock_repository()
+
+        def remove_signing_certs(self, hshs, ca, pub=None):
+                """Remove the given hashes from the certificates configured
+                for the publisher.  Whether the hashes are removed from the
+                list of CA certificates or the list of intermediate certificates
+                is determined by the 'ca' parameter. """
+
+                rstore = self.get_pub_rstore(pub)
+
+                prop_name = "intermediate_certs"
+                if ca:
+                        prop_name = "signing_ca_certs"
+
+                if self.version < 4:
+                        # For older repositories, the information is stored
+                        # in the repository configuration.
+                        t = set(self.cfg.get_property("publisher", prop_name))
+                        t.difference_update(hshs)
+                        self.cfg.set_property("publisher", prop_name, sorted(t))
+                        self.write_config()
+                        return
+
+                # For newer repositories, the certs are stored as part of the
+                # publisher's metadata.
+                pub_obj = rstore.get_publisher()
+                t = set(getattr(pub_obj, prop_name))
+                t.difference_update(hshs)
+                setattr(pub_obj, prop_name, sorted(t))
+                rstore.update_publisher(pub_obj)
+
+        def replace_package(self, pfmri):
+                """Replaces the information for the specified FMRI in the
+                repository's catalog."""
+
+                rstore = self.get_pub_rstore(pfmri.publisher)
+                return rstore.replace_package(pfmri)
+
+        def reset_search(self, pub=None):
+                """Discards currently loaded search data so that it will be
+                reloaded for the next search operation.
+                """
+                for rstore in self.rstores:
+                        if pub and rstore.publisher and rstore.publisher != pub:
+                                continue
+                        rstore.reset_search()
+
+        def search(self, queries, pub=None):
+                """Searches the index for each query in the list of queries.
+                Each entry should be the output of str(Query), or a Query
+                object.
+                """
+
+                rstore = self.get_pub_rstore(pub)
+                return rstore.search(queries)
+
+        def supports(self, op, ver):
+                """Returns a boolean value indicating whether the specified
+                operation is supported at the given version.
+                """
+
+                if op == "search" and self.root:
+                        return True
+                if op == "catalog" and ver == 1:
+                        # For catalog v1 to be "supported", all storage objects
+                        # must use it.
+                        for rstore in self.rstores:
+                                if rstore.catalog_version == 0:
+                                        return False
+                        return True
+                # Assume operation is supported otherwise.
+                return True
+
+        def update_publisher(self, pub):
+                """Updates the configuration information for the publisher
+                defined by the provided Publisher object.  Only supported
+                for version 4 and later repositories.
+                """
+
+                if self.mirror:
+                        raise RepositoryMirrorError()
+                if self.read_only:
+                        raise RepositoryReadOnlyError()
+                if not self.pub_root or self.version < 4:
+                        raise RepositoryUnsupportedOperationError()
+
+                # Get the repository storage area for the given publisher.
+                rstore = self.get_pub_rstore(pub.prefix)
+
+                # Update the publisher's configuration.
+                rstore.update_publisher(pub)
+
+        def valid_new_fmri(self, pfmri):
+                """Check that the FMRI supplied as an argument would be valid
+                to add to the repository catalog.  This checks to make sure
+                that any past catalog operations (such as a rename or freeze)
+                would not prohibit the caller from adding this FMRI."""
+
+                rstore = self.get_pub_rstore(pfmri.publisher)
+                return rstore.valid_new_fmri(pfmri)
 
         def write_config(self):
                 """Save the repository's current configuration data."""
@@ -1554,58 +2590,17 @@
                 finally:
                         self.__unlock_repository()
 
-        def add_signing_certs(self, cert_paths, ca, write_config=True):
-                """Add the certificates stored in the given paths to the
-                files in the repository and as properties of the publisher.
-                Whether the certificates are added as CA certificates or
-                intermediate certificates is determined by the 'ca' parameter.
-                """
-
-                hshs = []
-                
-                for p in cert_paths:
-                        # Get the hash of the file.
-                        hsh, s = misc.get_data_digest(p, return_content=True)
-                        hshs.append(hsh)
-                        if self.read_only:
-                                if not self.cache_store.lookup(hsh):
-                                        raise RepositoryReadOnlyError(hsh)
-                        else:
-                                # The temporary file is moved into place by the
-                                # insert.
-                                fd, pth = tempfile.mkstemp()
-                                gfh = PkgGzipFile(filename=pth, mode="wb")
-                                gfh.write(s)
-                                gfh.close()
-                                self.cache_store.insert(hsh, pth)
-                prop_name = "intermediate_certs"
-                if ca:
-                        prop_name = "signing_ca_certs"
-                t = set(self.cfg.get_property("publisher", prop_name))
-                t.update(hshs)
-                self.cfg.set_property("publisher", prop_name, sorted(t))
-                if write_config:
-                        self.write_config()
-
-        def remove_signing_certs(self, hshs, ca, write_config=True):
-                """Remove the given hashes from the certificates configured
-                for the publisher.  Whether the hashes are removed from the
-                list of CA certificates or the list of intermediate certificates
-                is determined by the 'ca' parameter. """
-
-                prop_name = "intermediate_certs"
-                if ca:
-                        prop_name = "signing_ca_certs"
-                t = set(self.cfg.get_property("publisher", prop_name))
-                t.difference_update(hshs)
-                self.cfg.set_property("publisher", prop_name, sorted(t))
-                if write_config:
-                        self.write_config()
-
-        catalog_root = property(__get_catalog_root, __set_catalog_root)
-        file_root = property(__get_file_root, __set_file_root)
-        repo_root = property(__get_repo_root, __set_repo_root)
-        writable_root = property(__get_writable_root, __set_writable_root)
+        catalog_requests = property(lambda self: self.__catalog_requests)
+        cfg = property(lambda self: self.__cfg)
+        file_requests = property(lambda self: self.__file_requests)
+        file_root = property(lambda self: self.__file_root)
+        manifest_requests = property(lambda self: self.__manifest_requests)
+        mirror = property(lambda self: self.__mirror, __set_mirror)
+        pub_root = property(lambda self: self.__pub_root)
+        read_only = property(lambda self: self.__read_only, __set_read_only)
+        root = property(lambda self: self.__root)
+        rstores = property(lambda self: self.__rstores.values())
+        writable_root = property(lambda self: self.__writable_root)
 
 
 class RepositoryConfig(object):
@@ -1634,6 +2629,39 @@
         # This dictionary defines the set of default properties and property
         # groups for a repository configuration indexed by version.
         __defs = {
+            2: [
+                cfg.PropertySection("publisher", [
+                    cfg.PropPublisher("alias"),
+                    cfg.PropPublisher("prefix"),
+                ]),
+                cfg.PropertySection("repository", [
+                    cfg.PropDefined("collection_type", ["core",
+                        "supplemental"], default="core"),
+                    cfg.PropDefined("description"),
+                    cfg.PropPubURI("detailed_url"),
+                    cfg.PropPubURIList("legal_uris"),
+                    cfg.PropDefined("maintainer"),
+                    cfg.PropPubURI("maintainer_url"),
+                    cfg.PropPubURIList("mirrors"),
+                    cfg.PropDefined("name",
+                        default="package repository"),
+                    cfg.PropPubURIList("origins"),
+                    cfg.PropInt("refresh_seconds", default=14400),
+                    cfg.PropPubURI("registration_uri"),
+                    cfg.PropPubURIList("related_uris"),
+                ]),
+                cfg.PropertySection("feed", [
+                    cfg.PropUUID("id"),
+                    cfg.PropDefined("name",
+                        default="package repository feed"),
+                    cfg.PropDefined("description"),
+                    cfg.PropDefined("icon", allowed=["", "<pathname>"],
+                        default="web/_themes/pkg-block-icon.png"),
+                    cfg.PropDefined("logo", allowed=["", "<pathname>"],
+                        default="web/_themes/pkg-block-logo.png"),
+                    cfg.PropInt("window", default=24),
+                ]),
+            ],
             3: [
                 cfg.PropertySection("publisher", [
                     cfg.PropPublisher("alias"),
@@ -1669,6 +2697,14 @@
                     cfg.PropInt("window", default=24),
                 ]),
             ],
+            4: [
+                cfg.PropertySection("publisher", [
+                    cfg.PropPublisher("prefix"),
+                ]),
+                cfg.PropertySection("repository", [
+                    cfg.PropInt("version"),
+                ]),
+            ],
         }
 
         def __new__(cls, target=None, overrides=misc.EmptyDict, version=None):
@@ -1681,24 +2717,57 @@
                 return cfg.FileConfig(target, definitions=cls.__defs,
                     overrides=overrides, version=version)
 
-def repository_create(repo_uri):
+
+def repository_create(repo_uri, properties=misc.EmptyDict, version=None):
         """Create a repository at given location and return the Repository
-        object for the new repository.  If the repository already exists,
-        a RepositoryExistsError will be raised.  Other errors can raise
-        exceptions of class ApiException.
+        object for the new repository.  If a repository (or directory at
+        the given location) already exists, a RepositoryExistsError will be
+        raised.  Other errors can raise exceptions of class ApiException.
         """
 
+        if isinstance(repo_uri, basestring):
+                repo_uri = publisher.RepositoryURI(misc.parse_uri(repo_uri))
+
         path = repo_uri.get_pathname()
         if not path:
                 # Bad URI?
                 raise RepositoryInvalidError(str(repo_uri))
 
+        if version is not None and (version < 3 or
+            version > CURRENT_REPO_VERSION):
+                raise RepositoryUnsupportedOperationError()
+
         try:
-                Repository(auto_create=False, read_only=True, repo_root=path)
-        except RepositoryInvalidError:
-                # No valid repository found; so create one.
-                repo = Repository(auto_create=True, repo_root=path)
-                assert os.path.exists(repo.repo_root)
-                return repo
-        # A repository isn't supposed to exist at this location.
-        raise RepositoryExistsError(path)
+                os.makedirs(path, misc.PKG_DIR_MODE)
+        except EnvironmentError, e:
+                if e.filename == path and (e.errno == errno.EEXIST or
+                    os.path.exists(e.filename)):
+                        raise RepositoryExistsError(e.filename)
+                elif e.errno == errno.EACCES:
+                        raise apx.PermissionsException(e.filename)
+                elif e.errno == errno.EROFS:
+                        raise apx.ReadOnlyFileSystemException(e.filename)
+                elif e.errno != errno.EEXIST or e.filename != path:
+                        raise
+
+        if version == 3:
+                # Version 3 repositories are expected to contain an additional
+                # set of specific directories...
+                for d in ("catalog", "file", "index", "pkg", "trans", "tmp"):
+                        misc.makedirs(os.path.join(path, d))
+
+                # ...and this file (which can be empty).
+                try:
+                        with file(os.path.join(path, "cfg_cache"), "wb") as cf:
+                                cf.write("\n")
+                except EnvironmentError, e:
+                        if e.errno == errno.EACCES:
+                                raise apx.PermissionsException(
+                                    e.filename)
+                        if e.errno == errno.EROFS:
+                                raise apx.ReadOnlyFileSystemException(
+                                    e.filename)
+                        elif e.errno != errno.EEXIST:
+                                raise
+
+        return Repository(read_only=False, properties=properties, root=path)
--- a/src/modules/server/transaction.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/modules/server/transaction.py	Thu Aug 19 23:33:49 2010 -0700
@@ -31,6 +31,7 @@
 import os
 import re
 import shutil
+import time
 import urllib
 
 import pkg.actions as actions
@@ -115,6 +116,14 @@
                 return str(self.data)
 
 
+class TransactionUnknownIDError(TransactionError):
+        """Used to indicate that the specified transaction ID is unknown."""
+
+        def __str__(self):
+                return _("No Transaction matching ID '%s' could be found.") % \
+                    self.data
+
+
 class TransactionAlreadyOpenError(TransactionError):
         """Used to indicate that a Transaction is already open for use."""
 
@@ -133,7 +142,7 @@
                 self.open_time = None
                 self.pkg_name = ""
                 self.esc_pkg_name = ""
-                self.repo = None
+                self.rstore = None
                 self.client_release = ""
                 self.fmri = None
                 self.dir = ""
@@ -143,7 +152,6 @@
                 self.types_found = set()
                 self.append_trans = False
                 self.remaining_payload_cnt = 0
-                return
 
         def get_basename(self):
                 assert self.open_time
@@ -152,9 +160,9 @@
                     (calendar.timegm(self.open_time.utctimetuple()),
                     urllib.quote(str(self.fmri), ""))
 
-        def open(self, repo, client_release, pfmri):
-                # XXX needs to be done in __init__
-                self.repo = repo
+        def open(self, rstore, client_release, pfmri):
+                # Store a reference to the repository storage object.
+                self.rstore = rstore
 
                 if client_release is None:
                         raise TransactionOperationError(client_release=None,
@@ -162,6 +170,9 @@
                 if pfmri is None:
                         raise TransactionOperationError(pfmri=None)
 
+                if not isinstance(pfmri, basestring):
+                        pfmri = str(pfmri)
+
                 self.client_release = client_release
                 self.pkg_name = pfmri
                 self.esc_pkg_name = urllib.quote(pfmri, "")
@@ -181,15 +192,9 @@
                 # Ensure that the FMRI has been fully qualified with publisher
                 # information or apply the default if appropriate.
                 if not self.fmri.publisher:
-                        c = repo.catalog
-                        pubs = c.publishers()
-                        default_pub = repo.cfg.get_property("publisher",
-                            "prefix")
-
-                        if len(pubs) > 1 or not default_pub:
-                                # A publisher is required if the repository
-                                # contains package data for more than one
-                                # publisher or no default has been defined.
+                        default_pub = rstore.publisher
+                        if not default_pub:
+                                # A publisher is required.
                                 raise TransactionOperationError(
                                     publisher_required=True, pfmri=pfmri)
 
@@ -213,22 +218,28 @@
                         self.pkg_name = ":".join(pfmri.split(":")[:-1])
                         self.esc_pkg_name = urllib.quote(self.pkg_name, "")
                 else:
-                        # A timestamp was not provided.
-                        self.open_time = datetime.datetime.utcnow()
-                        self.fmri.set_timestamp(self.open_time)
+                        # A timestamp was not provided; try to generate a
+                        # unique one.
+                        while 1:
+                                self.open_time = datetime.datetime.utcnow()
+                                self.fmri.set_timestamp(self.open_time)
+                                cat = rstore.catalog
+                                if not cat.get_entry(self.fmri):
+                                        break
+                                time.sleep(.25)
 
                 # Check that the new FMRI's version is valid.  In other words,
                 # the package has not been renamed or frozen for the new
                 # version.
-                if not repo.valid_new_fmri(self.fmri):
+                if not self.rstore.valid_new_fmri(self.fmri):
                         raise TransactionOperationError(valid_new_fmri=False,
                             pfmri=pfmri)
 
                 trans_basename = self.get_basename()
-                self.dir = "%s/%s" % (repo.trans_root, trans_basename)
+                self.dir = os.path.join(self.rstore.trans_root, trans_basename)
 
                 try:
-                        os.makedirs(self.dir)
+                        os.makedirs(self.dir, misc.PKG_DIR_MODE)
                 except EnvironmentError, e:
                         if e.errno == errno.EEXIST:
                                 raise TransactionAlreadyOpenError(
@@ -238,7 +249,8 @@
                 #
                 # always create a minimal manifest
                 #
-                tfile = file("%s/manifest" % self.dir, "ab+")
+                tfpath = os.path.join(self.dir, "manifest")
+                tfile = file(tfpath, "ab+")
 
                 # Build a set action containing the fully qualified FMRI and add
                 # it to the manifest.  While it may seem inefficient to create
@@ -261,9 +273,8 @@
                 # if not found, create package
                 # set package state to TRANSACTING
 
-        def append(self, repo, client_release, pfmri):
-                # XXX needs to be done in __init__
-                self.repo = repo
+        def append(self, rstore, client_release, pfmri):
+                self.rstore = rstore
                 self.append_trans = True
 
                 if client_release is None:
@@ -272,6 +283,9 @@
                 if pfmri is None:
                         raise TransactionOperationError(pfmri=None)
 
+                if not isinstance(pfmri, basestring):
+                        pfmri = str(pfmri)
+
                 self.client_release = client_release
                 self.pkg_name = pfmri
                 self.esc_pkg_name = urllib.quote(pfmri, "")
@@ -291,15 +305,9 @@
                 # Ensure that the FMRI has been fully qualified with publisher
                 # information or apply the default if appropriate.
                 if not self.fmri.publisher:
-                        c = repo.catalog
-                        pubs = c.publishers()
-                        default_pub = repo.cfg.get_property("publisher",
-                            "prefix")
-
-                        if len(pubs) > 1 or not default_pub:
-                                # A publisher is required if the repository
-                                # contains package data for more than one
-                                # publisher or no default has been defined.
+                        default_pub = rstore.publisher
+                        if not default_pub:
+                                # A publisher is required.
                                 raise TransactionOperationError(
                                     publisher_required=True, pfmri=pfmri)
 
@@ -315,25 +323,21 @@
 
                 # record transaction metadata: opening_time, package, user
                 self.open_time = self.fmri.get_timestamp()
-                if self.open_time:
-                        # Strip the timestamp information for consistency with
-                        # the case where it was not specified.
-                        self.pkg_name = ":".join(pfmri.split(":")[:-1])
-                        self.esc_pkg_name = urllib.quote(self.pkg_name, "")
-                else:
-                        # A timestamp was not provided.
-                        self.open_time = datetime.datetime.utcnow()
-                        self.fmri.set_timestamp(self.open_time)
 
-                if not repo.valid_append_fmri(self.fmri):
+                # Strip the timestamp information for consistency with
+                # the case where it was not specified.
+                self.pkg_name = ":".join(pfmri.split(":")[:-1])
+                self.esc_pkg_name = urllib.quote(self.pkg_name, "")
+
+                if not rstore.valid_append_fmri(self.fmri):
                         raise TransactionOperationError(missing_fmri=True,
                             pfmri=self.fmri)
 
                 trans_basename = self.get_basename()
-                self.dir = "%s/%s" % (repo.trans_root, trans_basename)
+                self.dir = os.path.join(rstore.trans_root, trans_basename)
 
                 try:
-                        os.makedirs(self.dir)
+                        os.makedirs(self.dir, misc.PKG_DIR_MODE)
                 except EnvironmentError, e:
                         if e.errno == errno.EEXIST:
                                 raise TransactionAlreadyOpenError(
@@ -345,19 +349,22 @@
                 with open(os.path.join(self.dir, "append"), "wb") as fh:
                         pass
 
-                # copy in existing manifest, then open it for appending
-                m = self.repo._get_manifest(pfmri)
-                tfile = file("%s/manifest" % self.dir, "ab+")
-                tfile.write(str(m))
-                tfile.close()
+                # copy in existing manifest, then open it for appending.
+                portable.copyfile(rstore.manifest(self.fmri),
+                    os.path.join(self.dir, "manifest"))
+
+        def reopen(self, rstore, trans_dir):
+                """The reopen() method is invoked by the repository as needed to
+                load Transaction data."""
 
-        def reopen(self, repo, trans_dir):
-                """The reopen() method is invoked on server restart, to
-                reestablish the status of inflight transactions."""
+                self.rstore = rstore
+                try:
+                        open_time_str, self.esc_pkg_name = \
+                            os.path.basename(trans_dir).split("_", 1)
+                except ValueError:
+                        raise TransactionUnknownIDError(os.path.basename(
+                            trans_dir))
 
-                self.repo = repo
-                open_time_str, self.esc_pkg_name = \
-                    os.path.basename(trans_dir).split("_", 1)
                 self.open_time = \
                     datetime.datetime.utcfromtimestamp(int(open_time_str))
                 self.pkg_name = urllib.unquote(self.esc_pkg_name)
@@ -366,11 +373,19 @@
                 # client release on the initial open of the transaction.
                 self.fmri = fmri.PkgFmri(self.pkg_name, None)
 
-                self.dir = "%s/%s" % (repo.trans_root, self.get_basename())
+                self.dir = os.path.join(rstore.trans_root, self.get_basename())
+
+                if not os.path.exists(self.dir):
+                        raise TransactionUnknownIDError(self.get_basename())
+
+                tmode = "rb"
+                if not rstore.read_only:
+                        tmode += "+"
 
                 # Find out if the package is renamed or obsolete.
                 try:
-                        tfile = file("%s/manifest" % self.dir, "rb+")
+                        tfpath = os.path.join(self.dir, "manifest")
+                        tfile = file(tfpath, tmode)
                 except IOError, e:
                         if e.errno == errno.ENOENT:
                                 return
@@ -386,7 +401,7 @@
                     action.name for action in m.gen_actions()
                 ))
 
-        def close(self, refresh_index=True, add_to_catalog=True):
+        def close(self, add_to_catalog=True):
                 """Closes an open transaction, returning the published FMRI for
                 the corresponding package, and its current state in the catalog.
                 """
@@ -402,16 +417,14 @@
 
                 # set state to PUBLISHED
                 if self.append_trans:
-                        pkg_fmri, pkg_state = self.accept_append(refresh_index,
-                            add_to_catalog)
+                        pkg_fmri, pkg_state = self.accept_append(add_to_catalog)
                 else:
-                        pkg_fmri, pkg_state = self.accept_publish(refresh_index,
+                        pkg_fmri, pkg_state = self.accept_publish(
                             add_to_catalog)
 
                 # Discard the in-flight transaction data.
                 try:
-                        shutil.rmtree(os.path.join(self.repo.trans_root,
-                            trans_id))
+                        shutil.rmtree(self.dir)
                 except EnvironmentError, e:
                         # Ensure that the error goes to stderr, and then drive
                         # on as the actual package was published.
@@ -420,9 +433,12 @@
                 return (pkg_fmri, pkg_state)
 
         def abandon(self):
-                trans_id = self.get_basename()
                 # state transition from TRANSACTING to ABANDONED
-                shutil.rmtree("%s/%s" % (self.repo.trans_root, trans_id))
+                try:
+                        shutil.rmtree(self.dir)
+                except EnvironmentError, e:
+                        if e.filename == self.dir and e.errno != errno.ENOENT:
+                                raise
                 return "ABANDONED"
 
         def add_content(self, action):
@@ -454,7 +470,8 @@
                         # Extract ELF information
                         # XXX This needs to be modularized.
                         if haveelf and data[:4] == "\x7fELF":
-                                elf_name = "%s/.temp" % self.dir
+                                elf_name = os.path.join(self.dir, ".temp-%s"
+                                    % fname)
                                 elf_file = open(elf_name, "wb")
                                 elf_file.write(data)
                                 elf_file.close()
@@ -474,9 +491,18 @@
                                 action.attrs["elfarch"] = elf_info["arch"]
                                 os.unlink(elf_name)
 
-                        dst_path = self.repo.cache_store.lookup(fname)
-                        csize, chash = misc.compute_compressed_attrs(fname,
-                            dst_path, data, size, self.dir)
+                        try:
+                                dst_path = self.rstore.file(fname)
+                        except Exception, e:
+                                # The specific exception can't be named here due
+                                # to the cyclic dependency between this class
+                                # and the repository class.
+                                if getattr(e, "data", "") != fname:
+                                        raise
+                                dst_path = None
+
+                        csize, chash = misc.compute_compressed_attrs(
+                            fname, dst_path, data, size, self.dir)
                         action.attrs["chash"] = chash.hexdigest()
                         action.attrs["pkg.csize"] = csize
                         chash = None
@@ -519,16 +545,17 @@
                             " be marked for both obsoletion and renaming."))
                 elif self.obsolete and action.name != "set":
                         raise TransactionOperationError(_("A '%s' action cannot"
-                            " be present in an obsolete package: %s") % 
+                            " be present in an obsolete package: %s") %
                             (action.name, action))
                 elif self.renamed and action.name not in ("set", "depend"):
                         raise TransactionOperationError(_("A '%s' action cannot"
-                            " be present in a renamed package: %s") % 
+                            " be present in a renamed package: %s") %
                             (action.name, action))
 
                 # Now that the action is known to be sane, we can add it to the
                 # manifest.
-                tfile = file("%s/manifest" % self.dir, "ab+")
+                tfpath = os.path.join(self.dir, "manifest")
+                tfile = file(tfpath, "ab+")
                 print >> tfile, action
                 tfile.close()
 
@@ -543,7 +570,16 @@
                 if size is None:
                         size = len(data)
 
-                dst_path = self.repo.cache_store.lookup(fname)
+                try:
+                        dst_path = self.rstore.file(fname)
+                except Exception, e:
+                        # The specific exception can't be named here due
+                        # to the cyclic dependency between this class
+                        # and the repository class.
+                        if getattr(e, "data", "") != fname:
+                                raise
+                        dst_path = None
+
                 csize, chash = misc.compute_compressed_attrs(fname, dst_path,
                     data, size, self.dir)
                 chash = None
@@ -551,7 +587,7 @@
 
                 self.remaining_payload_cnt -= 1
 
-        def accept_publish(self, refresh_index=True, add_to_catalog=True):
+        def accept_publish(self, add_to_catalog=True):
                 """Transaction meets consistency criteria, and can be published.
                 Publish, making appropriate catalog entries."""
 
@@ -565,15 +601,12 @@
                 # PUBLISHED due to the package's arrival.
 
                 self.publish_package()
-
                 if add_to_catalog:
-                        self.repo.add_package(self.fmri)
-                if refresh_index:
-                        self.repo.refresh_index()
+                        self.rstore.add_package(self.fmri)
 
                 return (str(self.fmri), "PUBLISHED")
 
-        def accept_append(self, refresh_index=True, add_to_catalog=True):
+        def accept_append(self, add_to_catalog=True):
                 """Transaction meets consistency criteria, and can be published.
                 Publish, making appropriate catalog replacements."""
 
@@ -594,9 +627,7 @@
                 self.publish_package()
 
                 if add_to_catalog:
-                        self.repo.replace_package(self.fmri)
-                if refresh_index:
-                        self.repo.refresh_index()
+                        self.rstore.replace_package(self.fmri)
 
                 return (str(self.fmri), "PUBLISHED")
 
@@ -605,28 +636,19 @@
 
                 It moves the files associated with the transaction into the
                 appropriate position in the server repository.  Callers
-                shall supply a fmri, repository, and transaction in fmri,
-                repo, and trans, respectively."""
-
-                repo = self.repo
+                shall supply a fmri, repo store, and transaction in fmri,
+                rstore, and trans, respectively."""
 
                 pkg_name = self.fmri.pkg_name
-                pkgdir = os.path.join(repo.manifest_root,
-                    urllib.quote(pkg_name, ""))
-
-                # If the directory isn't there, create it.
-                if not os.path.exists(pkgdir):
-                        os.makedirs(pkgdir)
 
                 # mv manifest to pkg_name / version
-                # A package may have no files, so there needn't be a manifest.
-                mpath = os.path.join(self.dir, "manifest")
-                if os.path.exists(mpath):
-                        portable.rename(mpath, os.path.join(pkgdir,
-                            urllib.quote(str(self.fmri.version), "")))
+                src_mpath = os.path.join(self.dir, "manifest")
+                dest_mpath = self.rstore.manifest(self.fmri)
+                misc.makedirs(os.path.dirname(dest_mpath))
+                portable.rename(src_mpath, dest_mpath)
 
                 # Move each file to file_root, with appropriate directory
                 # structure.
                 for f in os.listdir(self.dir):
                         src_path = os.path.join(self.dir, f)
-                        self.repo.cache_store.insert(f, src_path)
+                        self.rstore.cache_store.insert(f, src_path)
--- a/src/pkg/Makefile	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/pkg/Makefile	Thu Aug 19 23:33:49 2010 -0700
@@ -25,7 +25,7 @@
 PKGVERS_BUILTON   = 5.11
 BUILDNUM.cmd      = hg tags | nawk '$$1 ~ /^in[0-9]*$$/ {print substr($$1, 3) + 1; exit}'
 BUILDNUM          = $(BUILDNUM.cmd:sh)
-CURBUILD.cmd      = pkg list -H osnet-incorporation | sed -e 's/.*-0\.\([^ ]*\).*/\1/'
+CURBUILD.cmd      = pkg -R / list -H osnet-incorporation | sed -e 's/.*-0\.\([^ ]*\).*/\1/'
 CURBUILD          = $(CURBUILD.cmd:sh)
 PKGVERS_BRANCH    = 0.$(BUILDNUM)
 PKGVERS           = $(PKGVERS_COMPONENT),$(PKGVERS_BUILTON)-$(PKGVERS_BRANCH)
@@ -96,7 +96,7 @@
 # Initialize the repository
 $(PKGDEST)/repo:
 	$(PKGREPO) create $@
-	$(PKGREPO) -s $@ set-property publisher/prefix=$(PKGPUBLISHER)
+	$(PKGREPO) -s $@ set publisher/prefix=$(PKGPUBLISHER)
 
 # Special rule for creating the incorporation.  Note that the incorporation
 # looks at all manifests, not just the ones listed in $(MANIFESTS).  This is
--- a/src/pkgdep.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/pkgdep.py	Thu Aug 19 23:33:49 2010 -0700
@@ -41,7 +41,7 @@
 import pkg.publish.dependencies as dependencies
 from pkg.misc import msg, emsg, PipeError
 
-CLIENT_API_VERSION = 40
+CLIENT_API_VERSION = 42
 PKG_CLIENT_NAME = "pkgdepend"
 
 DEFAULT_SUFFIX = ".res"
--- a/src/pkgrepo.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/pkgrepo.py	Thu Aug 19 23:33:49 2010 -0700
@@ -35,28 +35,44 @@
 # listing constants
 LISTING_FORMATS = ("tsv", )
 
+# globals
+tmpdirs = []
+
+import atexit
+import copy
 import errno
 import getopt
 import gettext
 import locale
 import logging
 import os
+import pipes
+import shlex
+import shutil
 import sys
-import urllib
-import urlparse
+import tempfile
+import traceback
 import warnings
 
 from pkg.client import global_settings
 from pkg.misc import msg, PipeError
 import pkg
+import pkg.catalog
 import pkg.client.api_errors as apx
 import pkg.client.publisher as publisher
+import pkg.client.progress as progress
+import pkg.client.transport.transport as transport
 import pkg.misc as misc
 import pkg.server.repository as sr
-import shlex
-import traceback
 
 logger = global_settings.logger
+orig_cwd = None
+
[email protected]
+def cleanup():
+        """To be called at program finish."""
+        for d in tmpdirs:
+                shutil.rmtree(d, True)
 
 def error(text, cmd=None):
         """Emit an error message prefixed by the command name """
@@ -100,31 +116,40 @@
         pkgrepo [options] command [cmd_options] [operands]
 
 Subcommands:
-        pkgrepo create uri_or_path
+     pkgrepo create [--version] uri_or_path
+
+     pkgrepo add-signing-ca-cert [-p publisher ...]
+         [-s repo_uri_or_path] path ...
 
-        pkgrepo property [-F format] [-H] [<section/property> ...]
-        pkgrepo set-property <section/property>=<value> or 
-            <section/property>=(["<value>", ...])
+     pkgrepo add-signing-intermediate-cert [-p publisher ...]
+         [-s repo_uri_or_path] path ...
 
-        pkgrepo publisher [-F format] [-H] [publisher ...]
+     pkgrepo get [-p publisher ...] [-s repo_uri_or_path]
+         [section/property ...]
+
+     pkgrepo info [-F format] [-H] [-p publisher ...]
+         [-s repo_uri_or_path]
 
-        pkgrepo rebuild [--no-index]
-        pkgrepo refresh [--no-catalog] [--no-index]
+     pkgrepo rebuild [-s repo_uri_or_path] [--no-catalog]
+         [--no-index]
+
+     pkgrepo refresh [-s repo_uri_or_path] [--no-catalog]
+         [--no-index]
+
+     pkgrepo remove-signing-ca-cert [-p publisher ...]
+         [-s repo_uri_or_path] hash ...
 
-        pkgrepo add-signing-ca-cert path ...
-        pkgrepo add-signing-intermediate-cert path ...
-        pkgrepo remove-signing-ca-cert hash ...
-        pkgrepo remove-signing-intermediate-cert hash ...
+     pkgrepo remove-signing-intermediate-cert [-p publisher ...]
+         [-s repo_uri_or_path] hash ...
 
-        pkgrepo version
-        pkgrepo help
+     pkgrepo set [-p publisher ...] [-s repo_uri_or_path]
+         section/property[+|-]=[value] ... or
+         section/property[+|-]=([value]) ...
+
+     pkgrepo help
+     pkgrepo version
 
 Options:
-        -s repo_uri_or_path
-            A URI or filesystem path representing the location of a package
-            repository. Currently, only filesystem-based repositories are
-            supported.
-
         --help or -?
             Displays a usage message."""))
 
@@ -142,110 +167,162 @@
         into a valid repository URI.
         """
 
-        if uri.find("://") == -1 and not uri.startswith("file:/"):
-                # Convert the file path to a URI.
-                uri = os.path.abspath(uri)
-                uri = urlparse.urlunparse(("file", "",
-                    urllib.pathname2url(uri), "", "", ""))
-
-        scheme, netloc, path, params, query, fragment = \
-            urlparse.urlparse(uri, "file", allow_fragments=0)
-        scheme = scheme.lower()
-
-        if scheme != "file":
-                usage(_("Network repositories are not currently supported."),
-                    retcode=1)
-
-        if scheme == "file":
-                # During urlunparsing below, ensure that the path starts with
-                # only one '/' character, if any are present.
-                if path.startswith("/"):
-                        path = "/" + path.lstrip("/")
-
-        # Rebuild the url with the sanitized components.
-        uri = urlparse.urlunparse((scheme, netloc, path, params,
-            query, fragment))
-        return publisher.RepositoryURI(uri)
+        return publisher.RepositoryURI(misc.parse_uri(uri))
 
 
-def get_repo(conf, read_only=True, refresh_index=False):
-        """Return the repository object for current program configuration."""
+def _add_certs(conf, subcommand, args, ca):
+        opts, pargs = getopt.getopt(args, "p:s:")
+        pubs = set()
 
-        repo_uri = conf["repo_root"]
-        path = repo_uri.get_pathname()
-        if not path:
-                # Bad URI?
-                raise sr.RepositoryInvalidError(str(repo_uri))
-        return sr.Repository(auto_create=False, read_only=read_only,
-            refresh_index=refresh_index, repo_root=path)
+        for opt, arg in opts:
+                if opt == "-p":
+                        pubs.add(arg)
+                elif opt == "-s":
+                        conf["repo_uri"] = parse_uri(arg)
 
+        # Get repository object.
+        if not conf.get("repo_uri", None):
+                usage(_("A package repository location must be provided "
+                    "using -s."), cmd=subcommand)
 
-def subcmd_add_signing_ca_cert(conf, args):
-        subcommand = "add-signing-ca-cert"
-        repo = get_repo(conf, read_only=False)
-        opts, pargs = getopt.getopt(args, "")
+        repo = get_repo(conf, read_only=False, subcommand=subcommand)
 
         if len(pargs) < 1:
                 usage(_("At least one path to a certificate must be provided."))
 
-        fps = [os.path.abspath(f) for f in pargs]
-        repo.add_signing_certs(fps, ca=True)
+        failed = []
+        def add_certs(pfx=None):
+                if orig_cwd:
+                        certs = [os.path.join(orig_cwd, f) for f in pargs]
+                else:
+                        certs = [os.path.abspath(f) for f in pargs]
+
+                try:
+                        repo.add_signing_certs(certs, ca=ca, pub=pfx)
+                except (apx.ApiException, sr.RepositoryError), e:
+                        failed.append((pfx, e))
+
+        if "all" in pubs:
+                # Default to list of all publishers.
+                pubs = repo.publishers
+
+        if not pubs:
+                # Assume default publisher or older repository.
+                add_certs()
+        else:
+                # Add for each publisher specified.
+                map(add_certs, pubs)
+
+        return pubs, failed
+
+
+def subcmd_add_signing_ca_cert(conf, args):
+        """Add the provided signing ca certificates to the repository for
+        the given publisher."""
+
+        subcommand = "add-signing-ca-cert"
+        pubs, failed = _add_certs(conf, subcommand, args, True)
+        if failed:
+                for pfx, details in failed:
+                        error(_("Unable to add signing ca certificates for "
+                            "publisher '%(pfx)s':\n%(details)s") % locals(),
+                            cmd=subcommand)
+                if len(failed) < len(pubs):
+                        return EXIT_PARTIAL
+                return EXIT_OOPS
+        return EXIT_OK
 
 
 def subcmd_add_signing_intermediate_cert(conf, args):
         subcommand = "add-signing-intermediate-cert"
-        repo = get_repo(conf, read_only=False)
-        opts, pargs = getopt.getopt(args, "")
+        pubs, failed = _add_certs(conf, subcommand, args, False)
+        if failed:
+                for pfx, details in failed:
+                        if pfx:
+                                error(_("Unable to add signing intermediate "
+                                    "certificates for publisher '%(pfx)s':\n"
+                                    "%(details)s") % locals(), cmd=subcommand)
+                        else:
+                                error(_("Unable to add signing intermediate "
+                                    "certificates:\n%(details)s") % locals(),
+                                    cmd=subcommand)
+                if len(failed) < len(pubs):
+                        return EXIT_PARTIAL
+                return EXIT_OOPS
+        return EXIT_OK
+
+
+def _remove_certs(conf, subcommand, args, ca):
+        opts, pargs = getopt.getopt(args, "p:s:")
+        pubs = set()
+
+        for opt, arg in opts:
+                if opt == "-p":
+                        pubs.add(arg)
+                elif opt == "-s":
+                        conf["repo_uri"] = parse_uri(arg)
+
+        # Get repository object.
+        if not conf.get("repo_uri", None):
+                usage(_("A package repository location must be provided "
+                    "using -s."), cmd=subcommand)
+
+        repo = get_repo(conf, read_only=False, subcommand=subcommand)
 
         if len(pargs) < 1:
-                usage(_("At least one path to a certificate must be provided."))
+                usage(_("At least one certificate hash must be provided."))
+
+        failed = []
+        def remove_certs(pfx=None):
+                try:
+                        repo.remove_signing_certs(pargs, ca=ca, pub=pfx)
+                except (apx.ApiException, sr.RepositoryError), e:
+                        failed.append((pfx, e))
 
-        fps = [os.path.abspath(f) for f in pargs]
-        repo.add_signing_certs(fps, ca=False)
+        if "all" in pubs:
+                # Default to list of all publishers.
+                pubs = repo.publishers
+
+        if not pubs:
+                # Assume default publisher or older repository.
+                remove_certs()
+        else:
+                # Remove for each publisher specified.
+                map(remove_certs, pubs)
+
+        return pubs, failed
 
 
 def subcmd_remove_signing_ca_cert(conf, args):
         subcommand = "remove-signing-ca-cert"
-        repo = get_repo(conf, read_only=False)
-        opts, pargs = getopt.getopt(args, "")
-
-        if len(pargs) < 1:
-                usage(_("At least one certificate hash must be provided."))
-
-        repo.remove_signing_certs(pargs, ca=True)
+        pubs, failed = _remove_certs(conf, subcommand, args, True)
+        if failed:
+                for pfx, details in failed:
+                        error(_("Unable to remove signing ca certificates for "
+                            "publisher '%(pfx)s':\n%(details)s") % locals(),
+                            cmd=subcommand)
+                if len(failed) < len(pubs):
+                        return EXIT_PARTIAL
+                return EXIT_OOPS
+        return EXIT_OK
 
 
 def subcmd_remove_signing_intermediate_cert(conf, args):
         subcommand = "remove-signing-intermediate-cert"
-        repo = get_repo(conf, read_only=False)
-        opts, pargs = getopt.getopt(args, "")
-
-        if len(pargs) < 1:
-                usage(_("At least one certificate hash must be provided."))
-
-        repo.remove_signing_certs(pargs, ca=False)
-
-
-def subcmd_create(conf, args):
-        """Create a package repository at the given location."""
-
-        subcommand = "create"
-        opts, pargs = getopt.getopt(args, "")
-
-        if len(pargs) > 1:
-                usage(_("Only one repository location may be specified."),
-                    cmd=subcommand)
-        elif pargs:
-                conf["repo_root"] = parse_uri(pargs[0])
-
-        repo_root = conf.get("repo_root", None)
-        if not repo_root:
-                usage(_("No repository location specified."), cmd=subcommand)
-
-        # Attempt to create a repository at the specified location.  Allow
-        # whatever exceptions are raised to bubble up.
-        sr.repository_create(repo_root)
-
+        pubs, failed = _remove_certs(conf, subcommand, args, False)
+        if failed:
+                for pfx, details in failed:
+                        if pfx:
+                                error(_("Unable to remove signing intermediate "
+                                    "certificates for publisher '%(pfx)s':\n"
+                                    "%(details)s") % locals(), cmd=subcommand)
+                        else:
+                                error(_("Unable to remove signing intermediate "
+                                    "certificates:\n%(details)s") % locals(),
+                                    cmd=subcommand)
+                if len(failed) < len(pubs):
+                        return EXIT_PARTIAL
+                return EXIT_OOPS
         return EXIT_OK
 
 
@@ -271,8 +348,48 @@
         def get_value(record):
                 return record[2]
 
+        def quote_value(val):
+                if out_format == "tsv":
+                        # Expand tabs if tsv output requested.
+                        val = val.replace("\t", " " * 8)
+                nval = val
+                # Escape bourne shell metacharacters.
+                for c in ("\\", " ", "\t", "\n", "'", "`", ";", "&", "(", ")",
+                    "|", "^", "<", ">"):
+                        nval = nval.replace(c, "\\" + c)
+                return nval
+
         def set_value(entry):
-                entry[0][2] = entry[1]
+                val = entry[1]
+                multi_value = False
+                if isinstance(val, (list, set)):
+                        multi_value = True
+                elif val == "":
+                        entry[0][2] = '""'
+                        return
+                elif val is None:
+                        entry[0][2] = ''
+                        return
+                else:
+                        val = [val]
+
+                nval = []
+                for v in val:
+                        if v == "":
+                                # Indicate empty string value using "".
+                                nval.append('""')
+                        elif v is None:
+                                # Indicate no value using empty string.
+                                nval.append('')
+                        else:
+                                # Otherwise, escape the value to be displayed.
+                                nval.append(quote_value(str(v)))
+
+                val = " ".join(nval)
+                nval = None
+                if multi_value:
+                        val = "(%s)" % val
+                entry[0][2] = val
 
         if out_format == "default":
                 # Create a formatting string for the default output
@@ -306,16 +423,98 @@
                 msg(fmt % tuple(values))
 
 
-def subcmd_property(conf, args):
-        """Display the list of properties for the repository."""
+def get_repo(conf, read_only=True, subcommand=None):
+        """Return the repository object for current program configuration."""
+
+        repo_uri = conf["repo_uri"]
+        if repo_uri.scheme != "file":
+                usage(_("Network repositories are not currently supported "
+                    "for this operation."), cmd=subcommand)
+
+        path = repo_uri.get_pathname()
+        if not path:
+                # Bad URI?
+                raise sr.RepositoryInvalidError(str(repo_uri))
+        return sr.Repository(read_only=read_only, root=path)
+
+
+def setup_transport(conf):
+        repo_uri = conf.get("repo_uri", None)
+        if not repo_uri:
+                usage(_("No repository location specified."))
+
+        temp_root = misc.config_temp_root()
+
+        tmp_dir = tempfile.mkdtemp(dir=temp_root)
+        tmpdirs.append(tmp_dir)
+
+        incoming_dir = tempfile.mkdtemp(dir=temp_root)
+        tmpdirs.append(incoming_dir)
+
+        cache_dir = tempfile.mkdtemp(dir=temp_root)
+        tmpdirs.append(cache_dir)
+
+        # Create transport and transport config.
+        xport, xport_cfg = transport.setup_transport()
+        xport_cfg.cached_download_dir = cache_dir
+        xport_cfg.incoming_download_dir = incoming_dir
+
+        # Configure target publisher.
+        src_pub = transport.setup_publisher(str(repo_uri), "target", xport,
+            xport_cfg, remote_prefix=True)
+
+        return xport, src_pub, tmp_dir
+
 
-        subcommand = "property"
-        repo = get_repo(conf)
+def subcmd_create(conf, args):
+        """Create a package repository at the given location."""
+
+        subcommand = "create"
+
+        opts, pargs = getopt.getopt(args, "s:", ["version="])
+
+        version = None
+        for opt, arg in opts:
+                if opt == "-s":
+                        conf["repo_uri"] = parse_uri(arg)
+                elif opt == "--version":
+                        # This option is currently private and allows creating a
+                        # repository with a specific format based on version.
+                        try:
+                                version = int(arg)
+                        except ValueError:
+                                usage(_("Version must be an integer value."),
+                                    cmd=subcommand)
 
+        if len(pargs) > 1:
+                usage(_("Only one repository location may be specified."),
+                    cmd=subcommand)
+        elif pargs:
+                conf["repo_uri"] = parse_uri(pargs[0])
+
+        repo_uri = conf.get("repo_uri", None)
+        if not repo_uri:
+                usage(_("No repository location specified."), cmd=subcommand)
+        if repo_uri.scheme != "file":
+                usage(_("Network repositories are not currently supported "
+                    "for this operation."), cmd=subcommand)
+
+        # Attempt to create a repository at the specified location.  Allow
+        # whatever exceptions are raised to bubble up.
+        sr.repository_create(repo_uri, version=version)
+
+        return EXIT_OK
+
+
+def subcmd_get(conf, args):
+        """Display repository properties."""
+
+        subcommand = "get"
         omit_headers = False
         out_format = "default"
+        pubs = set()
 
-        opts, pargs = getopt.getopt(args, "F:H")
+        opts, pargs = getopt.getopt(args, "F:Hp:s:")
         for opt, arg in opts:
                 if opt == "-F":
                         out_format = arg
@@ -323,14 +522,36 @@
                                 usage(_("Unrecognized format %(format)s."
                                     " Supported formats: %(valid)s") % \
                                     { "format": out_format,
-                                    "valid": LISTING_FORMATS }, cmd="publisher")
+                                    "valid": LISTING_FORMATS }, cmd="get")
                                 return EXIT_OOPS
                 elif opt == "-H":
                         omit_headers = True
+                elif opt == "-p":
+                        pubs.add(arg)
+                elif opt == "-s":
+                        conf["repo_uri"] = parse_uri(arg)
+
+        # Setup transport so configuration can be retrieved.
+        if not conf.get("repo_uri", None):
+                usage(_("A package repository location must be provided "
+                    "using -s."), cmd=subcommand)
+        xport, xpub, tmp_dir = setup_transport(conf)
+
+        # Get properties.
+        if pubs:
+                return _get_pub(conf, subcommand, xport, xpub, omit_headers,
+                    out_format, pubs, pargs)
+        return _get_repo(conf, subcommand, xport, xpub, omit_headers,
+            out_format, pargs)
+
+
+def _get_repo(conf, subcommand, xport, xpub, omit_headers, out_format, pargs):
+        """Display repository properties."""
 
         # Configuration index is indexed by section name and property name.
-        # Flatten it to simplify listing process.
-        cfg_idx = repo.cfg.get_index()
+        # Retrieve and flatten it to simplify listing process.
+        stat_idx = xport.get_status(xpub)
+        cfg_idx = stat_idx.get("repository", {}).get("configuration", {})
         props = set()
 
         # Set minimum widths for section and property name columns by using the
@@ -343,11 +564,11 @@
                 for pname in cfg_idx[sname]:
                         max_pname_len = max(max_pname_len, len(pname))
                         props.add("/".join((sname, pname)))
-        del cfg_idx
 
-        if len(pargs) >= 1:
-                found = props & set(pargs)
-                notfound = set(pargs) - found
+        req_props = set(pargs)
+        if len(req_props) >= 1:
+                found = props & req_props
+                notfound = req_props - found
                 del props
         else:
                 found = props
@@ -356,7 +577,7 @@
         def gen_listing():
                 for prop in sorted(found):
                         sname, pname = prop.rsplit("/", 1)
-                        sval = str(repo.cfg.get_property(sname, pname))
+                        sval = cfg_idx[sname][pname]
                         yield {
                             "section": sname,
                             "property": pname,
@@ -372,19 +593,19 @@
             "property" : [("default", "tsv"), _("PROPERTY"), ""],
             "value" : [("default", "tsv"), _("VALUE"), ""],
         }
-        desired_field_order = (_("SECTION"), "", _("PROPERTY"), _("VALUE"))
+        desired_field_order = (_("SECTION"), _("PROPERTY"), _("VALUE"))
 
         # Default output formatting.
         def_fmt = "%-" + str(max_sname_len) + "s %-" + str(max_pname_len) + \
             "s %s"
 
-        if found or (not pargs and out_format == "default"):
+        if found or (not req_props and out_format == "default"):
                 print_col_listing(desired_field_order, field_data,
                     gen_listing(), out_format, def_fmt, omit_headers)
 
         if found and notfound:
                 return EXIT_PARTIAL
-        if pargs and not found:
+        if req_props and not found:
                 if out_format == "default":
                         # Don't pollute other output formats.
                         error(_("no matching properties found"),
@@ -393,110 +614,235 @@
         return EXIT_OK
 
 
-def subcmd_set_property(conf, args):
-        """Set a repository property."""
-
-        subcommand = "property"
-        repo = get_repo(conf, read_only=False)
+def _get_pub(conf, subcommand, xport, xpub, omit_headers, out_format, pubs,
+    pargs):
+        """Display publisher properties."""
 
-        omit_headers = False
-        out_format = "default"
-
-        opts, pargs = getopt.getopt(args, "")
-        bad_args = False
-        if not pargs or len(pargs) > 1:
-                bad_args = True
+        # Retrieve publisher information.
+        pub_data = xport.get_publisherdata(xpub)
+        known_pubs = set(p.prefix for p in pub_data)
+        if len(pubs) > 0 and "all" not in pubs:
+                found = known_pubs & pubs
+                notfound = pubs - found
         else:
-                try:
-                        if len(pargs) == 1:
-                                prop, val = pargs[0].split("=", 1)
-                                sname, pname = prop.rsplit("/", 1)
-                except ValueError:
-                        bad_args = True
+                found = known_pubs
+                notfound = set()
+
+        # Establish initial return value and perform early exit if appropriate.
+        rval = EXIT_OK
+        if found and notfound:
+                rval = EXIT_PARTIAL
+        elif pubs and not found:
+                if out_format == "default":
+                        # Don't pollute other output formats.
+                        error(_("no matching publishers found"),
+                            cmd=subcommand)
+                return EXIT_OOPS
+
+        # Set minimum widths for section and property name columns by using the
+        # length of the column headers and data.
+        max_pubname_len = str(max(
+            [len(_("PUBLISHER"))] + [len(p) for p in found]
+        ))
+        max_sname_len = len(_("SECTION"))
+        max_pname_len = len(_("PROPERTY"))
+
+        # For each requested publisher, retrieve the requested property data.
+        failed = set()
+        pub_idx = {}
+        for pub in pub_data:
+                if pub.prefix not in found:
+                        continue
+
+                pub_idx[pub.prefix] = {
+                    "publisher": {
+                        "alias": pub.alias,
+                        "prefix": pub.prefix,
+                    },
+                }
+
+                pub_repo = pub.selected_repository
+                if pub_repo:
+                        pub_idx[pub.prefix]["repository"] = {
+                            "collection-type": pub_repo.collection_type,
+                            "description": pub_repo.description,
+                            "legal-uris": pub_repo.legal_uris,
+                            "mirrors": pub_repo.mirrors,
+                            "name": pub_repo.name,
+                            "origins": pub_repo.origins,
+                            "refresh-seconds": pub_repo.refresh_seconds,
+                            "registration-uri": pub_repo.registration_uri,
+                            "related-uris": pub_repo.related_uris,
+                        }
+                else:
+                        pub_idx[pub.prefix]["repository"] = {
+                            "collection-type": "core",
+                            "description": "",
+                            "legal-uris": [],
+                            "mirrors": [],
+                            "name": "",
+                            "origins": [],
+                            "refresh-seconds": "",
+                            "registration-uri": "",
+                            "related-uris": [],
+                        }
 
-        if bad_args:
-                usage(_("a property name and value must be provided in the "
-                    "form <section/property>=<value> or "
-                    "<section/property>=([\"<value>\", ...])"))
+        # Determine possible set of properties and lengths.
+        props = set()
+        for pub in pub_idx:
+                for sname in pub_idx[pub]:
+                        max_sname_len = max(max_sname_len, len(sname))
+                        for pname in pub_idx[pub][sname]:
+                                max_pname_len = max(max_pname_len, len(pname))
+                                props.add("/".join((sname, pname)))
+
+        # Determine properties to display.
+        req_props = set(pargs)
+        if len(req_props) >= 1:
+                found = props & req_props
+                notfound = req_props - found
+                del props
+        else:
+                found = props
+                notfound = set()
+
+        def gen_listing():
+                for pub in sorted(pub_idx.keys()):
+                        for prop in sorted(found):
+                                sname, pname = prop.rsplit("/", 1)
+                                sval = pub_idx[pub][sname][pname]
+                                yield {
+                                    "publisher": pub,
+                                    "section": sname,
+                                    "property": pname,
+                                    "value": sval,
+                                }
 
-        if len(val) > 0  and val[0] == "(" and val[-1] == ")":
-                val = shlex.split(val.strip("()"))
+        #    PUBLISHER SECTION PROPERTY VALUE
+        #    <pub_1>   <sec_1> <prop_1> <prop_1_value>
+        #    <pub_1>   <sec_2> <prop_2> <prop_2_value>
+        #    ...
+        field_data = {
+            "publisher" : [("default", "tsv"), _("PUBLISHER"), ""],
+            "section" : [("default", "tsv"), _("SECTION"), ""],
+            "property" : [("default", "tsv"), _("PROPERTY"), ""],
+            "value" : [("default", "tsv"), _("VALUE"), ""],
+        }
+        desired_field_order = (_("PUBLISHER"), _("SECTION"), _("PROPERTY"),
+            _("VALUE"))
 
-        repo.cfg.set_property(sname, pname, val)
-        repo.write_config()
+        # Default output formatting.
+        def_fmt = "%-" + str(max_pubname_len) + "s %-" + str(max_sname_len) + \
+            "s %-" + str(max_pname_len) + "s %s"
+
+        if found or (not req_props and out_format == "default"):
+                print_col_listing(desired_field_order, field_data,
+                    gen_listing(), out_format, def_fmt, omit_headers)
+
+        if found and notfound:
+                rval = EXIT_PARTIAL
+        if req_props and not found:
+                if out_format == "default":
+                        # Don't pollute other output formats.
+                        error(_("no matching properties found"),
+                            cmd=subcommand)
+                rval = EXIT_OOPS
+        return rval
 
 
-def subcmd_publisher(conf, args):
+def subcmd_info(conf, args):
         """Display a list of known publishers and a summary of known packages
         and when the package data for the given publisher was last updated.
         """
 
-        subcommand = "publisher"
-        repo = get_repo(conf)
-
+        subcommand = "info"
         omit_headers = False
         out_format = "default"
+        pubs = set()
 
-        opts, pargs = getopt.getopt(args, "F:H")
+        opts, pargs = getopt.getopt(args, "F:Hp:s:")
         for opt, arg in opts:
                 if opt == "-F":
-                        out_format = arg
-                        if out_format not in LISTING_FORMATS:
+                        if arg not in LISTING_FORMATS:
                                 usage(_("Unrecognized format %(format)s."
                                     " Supported formats: %(valid)s") % \
-                                    { "format": out_format,
+                                    { "format": arg,
                                     "valid": LISTING_FORMATS }, cmd="publisher")
                                 return EXIT_OOPS
+                        out_format = arg
                 elif opt == "-H":
                         omit_headers = True
+                elif opt == "-p":
+                        pubs.add(arg)
+                elif opt == "-s":
+                        conf["repo_uri"] = parse_uri(arg)
 
-        cat = repo.catalog
-        pub_idx = {}
-        for pub, pkg_count, pkg_ver_count in cat.get_package_counts_by_pub():
-                pub_idx[pub] = (pkg_count, pkg_ver_count)
+        if pargs:
+                usage(_("command does not take operands"), cmd=subcommand)
 
-        if len(pargs) >= 1:
-                found = set(pub_idx.keys()) & set(pargs)
-                notfound = set(pargs) - found
+        # Setup transport so status can be retrieved.
+        if not conf.get("repo_uri", None):
+                usage(_("A package repository location must be provided "
+                    "using -s."), cmd=subcommand)
+        xport, xpub, tmp_dir = setup_transport(conf)
+
+        # Retrieve repository status information.
+        stat_idx = xport.get_status(xpub)
+        pub_idx = stat_idx.get("repository", {}).get("publishers", {})
+        if len(pubs) > 0 and "all" not in pubs:
+                found = set(pub_idx.keys()) & pubs
+                notfound = pubs - found
         else:
                 found = set(pub_idx.keys())
                 notfound = set()
 
         def gen_listing():
                 for pfx in found:
-                        pkg_count, pkg_ver_count = pub_idx[pfx]
+                        pdata = pub_idx[pfx]
+                        pkg_count = pdata.get("package-count", 0)
+                        last_update = pdata.get("last-catalog-update", "")
+                        if last_update:
+                                # Reformat the date into something more user
+                                # friendly (and locale specific).
+                                last_update = pkg.catalog.basic_ts_to_datetime(
+                                    last_update)
+                                last_update = "%sZ" % pkg.catalog.datetime_to_ts(
+                                    last_update)
+                        rstatus = _(pub_idx[pfx].get("status", "online"))
                         yield {
                             "publisher": pfx,
                             "packages": pkg_count,
-                            "versions": pkg_ver_count,
-                            "updated": "%sZ" % cat.last_modified.isoformat(),
+                            "status": rstatus,
+                            "updated": last_update,
                         }
 
-        #    PUBLISHER PACKAGES        VERSIONS       UPDATED
-        #    <pub_1>   <num_uniq_pkgs> <num_pkg_vers> <cat_last_modified>
-        #    <pub_2>   <num_uniq_pkgs> <num_pkg_vers> <cat_last_modified>
+        #    PUBLISHER PACKAGES        STATUS   UPDATED
+        #    <pub_1>   <num_uniq_pkgs> <status> <cat_last_modified>
+        #    <pub_2>   <num_uniq_pkgs> <status> <cat_last_modified>
         #    ...
-
         field_data = {
             "publisher" : [("default", "tsv"), _("PUBLISHER"), ""],
             "packages" : [("default", "tsv"), _("PACKAGES"), ""],
-            "versions" : [("default", "tsv"), _("VERSIONS"), ""],
+            "status" : [("default", "tsv"), _("STATUS"), ""],
             "updated" : [("default", "tsv"), _("UPDATED"), ""],
         }
 
-        desired_field_order = (_("PUBLISHER"), "", _("PACKAGES"), _("VERSIONS"),
+        desired_field_order = (_("PUBLISHER"), "", _("PACKAGES"), _("STATUS"),
             _("UPDATED"))
 
         # Default output formatting.
-        def_fmt = "%-24s %-8s %-8s %s"
+        pub_len = str(max(
+            [len(desired_field_order[0])] + [len(p) for p in found]
+        ))
+        def_fmt = "%-" + pub_len + "s %-8s %-16s %s"
 
-        if found or (not pargs and out_format == "default"):
+        if found or (not pubs and out_format == "default"):
                 print_col_listing(desired_field_order, field_data,
                     gen_listing(), out_format, def_fmt, omit_headers)
 
         if found and notfound:
                 return EXIT_PARTIAL
-        if pargs and not found:
+        if pubs and not found:
                 if out_format == "default":
                         # Don't pollute other output formats.
                         error(_("no matching publishers found"),
@@ -509,24 +855,38 @@
         """Rebuild the repository's catalog and index data (as permitted)."""
 
         subcommand = "rebuild"
-        repo = get_repo(conf, read_only=False)
-
+        build_catalog = True
         build_index = True
-        opts, pargs = getopt.getopt(args, "", ["no-index"])
+
+        opts, pargs = getopt.getopt(args, "s:", ["no-catalog", "no-index"])
         for opt, arg in opts:
-                if opt == "--no-index":
+                if opt == "-s":
+                        conf["repo_uri"] = parse_uri(arg)
+                elif opt == "--no-catalog":
+                        build_catalog = False
+                elif opt == "--no-index":
                         build_index = False
 
         if pargs:
                 usage(_("command does not take operands"), cmd=subcommand)
 
-        logger.info("Rebuilding package repository...")
-        repo.rebuild(build_index=False)
+        if not build_catalog and not build_index:
+                # Why?  Who knows; but do what was requested--nothing!
+                return EXIT_OK
 
-        if build_index:
-                # Always build search indexes seperately (and if permitted).
-                logger.info("Building search indexes...")
-                repo.refresh_index()
+        # Setup transport so operation can be performed.
+        if not conf.get("repo_uri", None):
+                usage(_("A package repository location must be provided "
+                    "using -s."), cmd=subcommand)
+        xport, src_pub, tmp_dir = setup_transport(conf)
+
+        logger.info("Repository rebuild initiated.")
+        if build_catalog and build_index:
+                xport.publish_rebuild(src_pub)
+        elif build_catalog:
+                xport.publish_rebuild_packages(src_pub)
+        elif build_index:
+                xport.publish_rebuild_indexes(src_pub)
 
         return EXIT_OK
 
@@ -535,13 +895,14 @@
         """Refresh the repository's catalog and index data (as permitted)."""
 
         subcommand = "refresh"
-        repo = get_repo(conf, read_only=False)
-
         add_content = True
         refresh_index = True
-        opts, pargs = getopt.getopt(args, "", ["no-catalog", "no-index"])
+
+        opts, pargs = getopt.getopt(args, "s:", ["no-catalog", "no-index"])
         for opt, arg in opts:
-                if opt == "--no-catalog":
+                if opt == "-s":
+                        conf["repo_uri"] = parse_uri(arg)
+                elif opt == "--no-catalog":
                         add_content = False
                 elif opt == "--no-index":
                         refresh_index = False
@@ -553,14 +914,188 @@
                 # Why?  Who knows; but do what was requested--nothing!
                 return EXIT_OK
 
-        if add_content:
-                logger.info("Adding new package content...")
-                repo.add_content(refresh_index=False)
+        # Setup transport so operation can be performed.
+        if not conf.get("repo_uri", None):
+                usage(_("A package repository location must be provided "
+                    "using -s."), cmd=subcommand)
+        xport, src_pub, tmp_dir = setup_transport(conf)
+
+        logger.info("Repository refresh initiated.")
+        if add_content and refresh_index:
+                xport.publish_refresh(src_pub)
+        elif add_content:
+                xport.publish_refresh_packages(src_pub)
+        elif refresh_index:
+                xport.publish_refresh_indexes(src_pub)
+        return EXIT_OK
+
+
+def subcmd_set(conf, args):
+        """Set repository properties."""
+
+        subcommand = "set"
+        omit_headers = False
+        pubs = set()
+
+        opts, pargs = getopt.getopt(args, "p:s:")
+        for opt, arg in opts:
+                if opt == "-p":
+                        pubs.add(arg)
+                elif opt == "-s":
+                        conf["repo_uri"] = parse_uri(arg)
+
+        bad_args = False
+        props = {}
+        if not pargs:
+                bad_args = True
+        else:
+                for arg in pargs:
+                        try:
+                                # Attempt to parse property into components.
+                                prop, val = arg.split("=", 1)
+                                sname, pname = prop.rsplit("/", 1)
+
+                                # Store property values by section.
+                                props.setdefault(sname, {})
+
+                                # Parse the property value into a list if
+                                # necessary, otherwise append it to the list
+                                # of values for the property.
+                                if len(val) > 0 and val[0] == "(" and \
+                                    val[-1] == ")":
+                                        val = shlex.split(val.strip("()"))
+
+                                if sname in props and pname in props[sname]:
+                                        # Determine if previous value is already
+                                        # a list, and if not, convert and append
+                                        # the value.
+                                        pval = props[sname][pname]
+                                        if not isinstance(pval, list):
+                                                pval = [pval]
+                                        if isinstance(val, list):
+                                                pval.extend(val)
+                                        else:
+                                                pval.append(val)
+                                        props[sname][pname] = pval
+                                else:
+                                        # Otherwise, just store the value.
+                                        props[sname][pname] = val
+                        except ValueError:
+                                bad_args = True
+                                break
+
+        if bad_args:
+                usage(_("a property name and value must be provided in the "
+                    "form <section/property>=<value> or "
+                    "<section/property>=([\"<value>\" ...])"))
+
+        # Get repository object.
+        if not conf.get("repo_uri", None):
+                usage(_("A package repository location must be provided "
+                    "using -s."), cmd=subcommand)
+        repo = get_repo(conf, read_only=False, subcommand=subcommand)
+
+        # Set properties.
+        if pubs:
+                return _set_pub(conf, subcommand, omit_headers, props, pubs,
+                    repo)
+
+        return _set_repo(conf, subcommand, omit_headers, props, repo)
+
 
-        if refresh_index:
-                # Always update search indexes separately (and if permitted).
-                logger.info("Updating search indexes...")
-                repo.refresh_index()
+def _set_pub(conf, subcommand, omit_headers, props, pubs, repo):
+        """Set publisher properties."""
+
+        for sname, sprops in props.iteritems():
+                if sname not in ("publisher", "repository"):
+                        usage(_("unknown property section "
+                            "'%s'") % sname, cmd=subcommand)
+                for pname in sprops:
+                        if sname == "publisher" and pname == "prefix":
+                                usage(_("'%s' may not be set using "
+                                    "this command") % pname)
+                        attrname = pname.replace("-", "_")
+                        if not hasattr(publisher.Publisher, attrname) and \
+                            not hasattr(publisher.Repository, attrname):
+                                usage(_("unknown property '%s'") %
+                                    pname, cmd=subcommand)
+
+        if "all" in pubs:
+                # Default to list of all publishers.
+                pubs = repo.publishers
+                if not pubs:
+                        # If there are still no known publishers, this
+                        # operation cannot succeed, so fail now.
+                        usage(_("One or more publishers must be specified to "
+                            "create and set properties for as none exist yet."),
+                            cmd=subcommand)
+
+        # Get publishers and update properties.
+        failed = []
+        new_pub = False
+        for pfx in pubs:
+                try:
+                        # Get a copy of the existing publisher.
+                        pub = copy.copy(repo.get_publisher(pfx))
+                except sr.RepositoryUnknownPublisher, e:
+                        pub = publisher.Publisher(pfx)
+                        new_pub = True
+                except sr.RepositoryError, e:
+                        failed.append((pfx, e))
+                        continue
+
+                try:
+                        # Set/update the publisher's properties.
+                        for sname, sprops in props.iteritems():
+                                if sname == "publisher":
+                                        target = pub
+                                elif sname == "repository":
+                                        target = pub.selected_repository
+                                        if not target:
+                                                target = publisher.Repository()
+                                                pub.repositories.append(target)
+
+                                for pname, val in sprops.iteritems():
+                                        attrname = pname.replace("-", "_")
+                                        pval = getattr(target, attrname)
+                                        if isinstance(pval, list) and \
+                                            not isinstance(val, list):
+                                                # If the target property expects
+                                                # a list, transform the provided
+                                                # value into one if it isn't
+                                                # already.
+                                                if val == "":
+                                                        val = []
+                                                else:
+                                                        val = [val]
+                                        setattr(target, attrname, val)
+                except apx.ApiException, e:
+                        failed.append((pfx, e))
+                        continue
+
+                if new_pub:
+                        repo.add_publisher(pub)
+                else:
+                        repo.update_publisher(pub)
+
+        if failed:
+                for pfx, details in failed:
+                        error(_("Unable to set properties for publisher "
+                            "'%(pfx)s':\n%(details)s") % locals())
+                if len(failed) < len(pubs):
+                        return EXIT_PARTIAL
+                return EXIT_OOPS
+        return EXIT_OK
+
+
+def _set_repo(conf, subcommand, omit_headers, props, repo):
+        """Set repository properties."""
+
+        # Set properties.
+        for sname, props in props.iteritems():
+                for pname, val in props.iteritems():
+                        repo.cfg.set_property(sname, pname, val)
+        repo.write_config()
 
         return EXIT_OK
 
@@ -569,10 +1104,6 @@
         """Display the version of the pkg(5) API."""
 
         subcommand = "version"
-
-        if conf.get("repo_root", None):
-                usage(_("-s not allowed for %s subcommand") %
-                      subcommand)
         if args:
                 usage(_("command does not take operands"), cmd=subcommand)
         msg(pkg.VERSION)
@@ -582,6 +1113,18 @@
 def main_func():
         global_settings.client_name = PKG_CLIENT_NAME
 
+        global orig_cwd
+
+        try:
+                orig_cwd = os.getcwd()
+        except OSError, e:
+                try:
+                        orig_cwd = os.environ["PWD"]
+                        if not orig_cwd or orig_cwd[0] != "/":
+                                orig_cwd = None
+                except KeyError:
+                        orig_cwd = None
+
         try:
                 opts, pargs = getopt.getopt(sys.argv[1:], "s:?",
                     ["help"])
@@ -592,9 +1135,7 @@
         show_usage = False
         for opt, arg in opts:
                 if opt == "-s":
-                        if not arg:
-                                continue
-                        conf["repo_root"] = parse_uri(arg)
+                        conf["repo_uri"] = parse_uri(arg)
                 elif opt in ("--help", "-?"):
                         show_usage = True
 
@@ -612,19 +1153,17 @@
         subcommand = subcommand.replace("-", "_")
         func = globals().get("subcmd_%s" % subcommand, None)
         if not func:
+                subcommand = subcommand.replace("_", "-")
                 usage(_("unknown subcommand '%s'") % subcommand)
 
         try:
-                if (subcommand != "create" and subcommand != "version") and \
-                    not conf.get("repo_root", None):
-                        usage(_("A package repository location must be "
-                            "provided using -s."), cmd=subcommand)
                 return func(conf, pargs)
         except getopt.GetoptError, e:
                 if e.opt in ("help", "?"):
                         usage(full=True)
                 usage(_("illegal option -- %s") % e.opt, cmd=subcommand)
 
+
 #
 # Establish a specific exit status which means: "python barfed an exception"
 # so that we can more easily detect these in testing of the CLI commands.
@@ -666,6 +1205,9 @@
                      'api': __e.expected_version
                     })
                 __ret = EXIT_OOPS
+        except apx.BadRepositoryURI, __e:
+                error(str(__e))
+                __ret = EXIT_BADOPT
         except (apx.ApiException, sr.RepositoryError), __e:
                 error(str(__e))
                 __ret = EXIT_OOPS
--- a/src/publish.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/publish.py	Thu Aug 19 23:33:49 2010 -0700
@@ -201,9 +201,10 @@
 def trans_close(repo_uri, args):
         abandon = False
         trans_id = None
-        refresh_index = True
         add_to_catalog = True
 
+        # --no-index is now silently ignored as the publication process no
+        # longer builds search indexes automatically.
         opts, pargs = getopt.getopt(args, "At:", ["no-index", "no-catalog"])
 
         for opt, arg in opts:
@@ -211,8 +212,6 @@
                         abandon = True
                 elif opt == "-t":
                         trans_id = arg
-                elif opt == "--no-index":
-                        refresh_index = False
                 elif opt == "--no-catalog":
                         add_to_catalog = False
         if trans_id is None:
@@ -223,9 +222,9 @@
                             "$PKG_TRANS_ID."), cmd="close")
 
         xport, pub = setup_transport_and_pubs(repo_uri)
-        t = trans.Transaction(repo_uri, trans_id=trans_id,
-            add_to_catalog=add_to_catalog, xport=xport, pub=pub)
-        pkg_state, pkg_fmri = t.close(abandon, refresh_index)
+        t = trans.Transaction(repo_uri, trans_id=trans_id, xport=xport, pub=pub)
+        pkg_state, pkg_fmri = t.close(abandon=abandon,
+            add_to_catalog=add_to_catalog)
         for val in (pkg_state, pkg_fmri):
                 if val is not None:
                         msg(val)
@@ -259,7 +258,6 @@
         basedirs = []
         timestamp_files = []
 
-        refresh_index = True
         add_to_catalog = True
         embedded_fmri = False
 
@@ -335,7 +333,7 @@
 
         xport, pub = setup_transport_and_pubs(repo_uri)
         t = trans.Transaction(repo_uri, pkg_name=pkg_name,
-            refresh_index=refresh_index, xport=xport, pub=pub)
+            xport=xport, pub=pub)
         t.open()
 
         for a in m.gen_actions():
@@ -365,7 +363,7 @@
                         raise
 
         pkg_state, pkg_fmri = t.close(abandon=False,
-            refresh_index=refresh_index, add_to_catalog=add_to_catalog)
+            add_to_catalog=add_to_catalog)
         for val in (pkg_state, pkg_fmri):
                 if val is not None:
                         msg(val)
@@ -576,7 +574,8 @@
 
         xport, pub = setup_transport_and_pubs(repo_uri)
         try:
-                t = trans.Transaction(repo_uri, xport=xport, pub=pub).refresh_index()
+                t = trans.Transaction(repo_uri, xport=xport,
+                    pub=pub).refresh_index()
         except trans.TransactionError, e:
                 error(e, cmd="refresh-index")
                 return 1
@@ -668,8 +667,7 @@
         try:
                 __ret = main_func()
         except (pkg.actions.ActionError, trans.TransactionError,
-            RuntimeError, pkg.fmri.IllegalFmri, apx.BadRepositoryURI,
-            apx.UnsupportedRepositoryURI, apx.InvalidPackageErrors), _e:
+            RuntimeError, pkg.fmri.IllegalFmri, apx.ApiException), _e:
                 print >> sys.stderr, "pkgsend: %s" % _e
                 __ret = 1
         except (PipeError, KeyboardInterrupt):
--- a/src/pull.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/pull.py	Thu Aug 19 23:33:49 2010 -0700
@@ -196,7 +196,7 @@
         path = urllib.url2pathname(parts[2])
 
         try:
-                repo = sr.Repository(read_only=True, repo_root=path)
+                repo = sr.Repository(read_only=True, root=path)
         except EnvironmentError, _e:
                 error("an error occurred while trying to " \
                     "initialize the repository directory " \
@@ -391,7 +391,7 @@
         src_pub = None
         targ_pub = None
 
-        temp_root = config_temp_root()
+        temp_root = misc.config_temp_root()
 
         gettext.install("pkg", "/usr/share/locale")
 
@@ -460,7 +460,6 @@
         if pargs == None or len(pargs) == 0:
                 usage(_("must specify at least one pkgfmri"))
 
-        defer_refresh = False
         republish = False
 
         if not target:
@@ -476,10 +475,6 @@
                 # Files have to be decompressed for republishing.
                 keep_compressed = False
                 if target.startswith("file://"):
-                        # For efficiency, and publishing speed, don't update
-                        # indexes until all file publishing is finished.
-                        defer_refresh = True
-
                         # Check to see if the repository exists first.
                         try:
                                 t = trans.Transaction(target, xport=xport,
@@ -603,8 +598,7 @@
 
                 try:
                         t = trans.Transaction(target, pkg_name=pkg_name,
-                            trans_id=trans_id, refresh_index=not defer_refresh,
-                            xport=xport, pub=targ_pub)
+                            trans_id=trans_id, xport=xport, pub=targ_pub)
 
                         # Remove any previous failed attempt to
                         # to republish this package.
@@ -630,24 +624,13 @@
                                         a.data = lambda: open(fname,
                                             "rb")
                                 t.add(a)
-                        t.close(refresh_index=not defer_refresh)
+                        t.close()
                 except trans.TransactionError, e:
                         abort(err=e)
                         return 1
 
         # Dump all temporary data.
         cleanup()
-
-        if republish:
-                if defer_refresh:
-                        msg(_("Refreshing repository search indices ..."))
-                        try:
-                                t = trans.Transaction(target, xport=xport,
-                                    pub=targ_pub)
-                                t.refresh_index()
-                        except trans.TransactionError, e:
-                                error(e)
-                                return 1
         return 0
 
 if __name__ == "__main__":
--- a/src/sign.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/sign.py	Thu Aug 19 23:33:49 2010 -0700
@@ -44,7 +44,6 @@
 import pkg.manifest as manifest
 import pkg.misc as misc
 import pkg.publish.transaction as trans
-import pkg.server.repository as sr
 from pkg.client import global_settings
 from pkg.misc import emsg, msg, PipeError
 
@@ -138,7 +137,6 @@
         cert_path = None
         key_path = None
         chain_certs = []
-        refresh_index = True
         add_to_catalog = True
         set_alg = False
         sign_all = False
@@ -172,8 +170,6 @@
                         repo_uri = arg
                 elif opt == "--help":
                         show_usage = True
-                elif opt == "--no-index":
-                        refresh_index = False
                 elif opt == "--no-catalog":
                         add_to_catalog = False
                 elif opt == "--sign-all":
@@ -238,7 +234,7 @@
                     list_packages=sign_all)
                 if not sign_all:
                         fmris = pargs
-                succesful_publish = False
+                successful_publish = False
 
                 for pfmri in fmris:
                         try:
@@ -271,25 +267,24 @@
                                 # published manifest.
                                 t = trans.Transaction(repo_uri,
                                     pkg_name=str(pfmri), xport=xport,
-                                    pub=src_pub, refresh_index=refresh_index)
+                                    pub=src_pub)
                                 t.append()
                                 try:
                                         t.add(a)
                                         for c in chain_certs:
                                                 t.add_file(c)
-                                        t.close(refresh_index=refresh_index,
-                                            add_to_catalog=add_to_catalog)
+                                        t.close(add_to_catalog=add_to_catalog)
                                 except:
                                         t.close(abandon=True)
                                         raise
                                 msg(_("Signed %s") % pfmri)
-                                succesful_publish = True
+                                successful_publish = True
                         except (api_errors.ApiException, fmri.FmriError,
                             trans.TransactionError), e:
                                 errors.append(e)
                 if errors:
                         error("\n".join([str(e) for e in errors]))
-                        if succesful_publish:
+                        if successful_publish:
                                 return EXIT_PARTIAL
                         else:
                                 return EXIT_OOPS
--- a/src/tests/api/t_api.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/api/t_api.py	Thu Aug 19 23:33:49 2010 -0700
@@ -40,7 +40,7 @@
 import time
 import unittest
 
-API_VERSION = 40
+API_VERSION = 42
 PKG_CLIENT_NAME = "pkg"
 
 class TestPkgApi(pkg5unittest.SingleDepotTestCase):
@@ -292,7 +292,11 @@
                 # First create the image and get v1 catalog.
                 self.dc.start()
                 self.pkgsend_bulk(self.durl, (self.foo10, self.quux10))
-                api_obj = self.image_create(self.durl, prefix="bobcat")
+                try:
+                        api_obj = self.image_create(self.durl, prefix="bobcat")
+                except api_errors.CatalogRefreshException, e:
+                        self.debug("\n".join(str(x[-1]) for x in e.failed))
+                        raise
 
                 self.pkg("publisher")
                 img = api_obj.img
@@ -939,6 +943,5 @@
                 api_obj.reset()
 
 
-
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/api/t_api_list.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/api/t_api_list.py	Thu Aug 19 23:33:49 2010 -0700
@@ -45,7 +45,7 @@
 import pkg.misc as misc
 import pkg.version as version
 
-API_VERSION = 40
+API_VERSION = 42
 PKG_CLIENT_NAME = "pkg"
 
 class TestApiList(pkg5unittest.ManyDepotTestCase):
@@ -244,7 +244,7 @@
                 # timestamps as those in the first ... by copying the repo over.
                 d1dir = self.dcs[1].get_repodir()
                 d2dir = self.dcs[2].get_repodir()
-                self.copy_repository(d1dir, "test1", d2dir, "test2")
+                self.copy_repository(d1dir, d2dir, { "test1": "test2" })
 
                 self.dlist1 = []
                 self.dlist2 = []
--- a/src/tests/api/t_api_search.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/api/t_api_search.py	Thu Aug 19 23:33:49 2010 -0700
@@ -491,6 +491,23 @@
                                     str(proposed_answer - correct_answer))
                         self.assertEqual(correct_answer, proposed_answer)
 
+        def _get_repo_index_dir(self):
+                depotpath = self.dc.get_repodir()
+                repo = self.dc.get_repo()
+                rstore = repo.get_pub_rstore("test")
+                return rstore.index_root
+
+        def _get_repo_writ_dir(self):
+                depotpath = self.dc.get_repodir()
+                repo = self.dc.get_repo()
+                rstore = repo.get_pub_rstore("test")
+                return rstore.writable_root
+
+        def _get_repo_catalog(self):
+                repo = self.dc.get_repo()
+                rstore = repo.get_pub_rstore("test")
+                return rstore.catalog
+
         @staticmethod
         def _replace_act(act):
                 if act.startswith('set name=pkg.fmri'):
@@ -1239,20 +1256,21 @@
         def pkgsend_bulk(self, durl, pkg, optional=True):
                 if pkg not in self.sent_pkgs or optional == False:
                         self.sent_pkgs.add(pkg)
-                        TestApiSearchBasics.pkgsend_bulk(self, durl, pkg)
+                        # Ensures indexing is done for every pkgsend.
+                        TestApiSearchBasics.pkgsend_bulk(self, durl, pkg,
+                            refresh_index=True)
+                        self.wait_repo(self.dc.get_repodir())
 
         def setUp(self):
                 TestApiSearchBasics.setUp(self)
                 durl = self.dc.get_depot_url()
-                self.pkgsend_bulk(durl, self.example_pkg10)
-                self.pkgsend_bulk(durl, self.fat_pkg10)
-                self.pkgsend_bulk(durl, self.another_pkg10)
+                self.pkgsend_bulk(durl, (self.example_pkg10, self.fat_pkg10,
+                    self.another_pkg10))
 
         def test_010_remote(self):
                 """Test remote search."""
                 durl = self.dc.get_depot_url()
                 api_obj = self.image_create(durl)
-                time.sleep(1)
                 # This should be a full test to test all functionality.
                 self._run_full_remote_tests(api_obj)
                 self._search_op(api_obj, True, ":file::", self.res_remote_file)
@@ -1505,7 +1523,6 @@
                 api_obj = self.image_create(durl)
 
                 self._api_install(api_obj, ["space_pkg"])
-                time.sleep(1)
 
                 self.pkgsend_bulk(durl, self.space_pkg10, optional=False)
                 api_obj.refresh(immediate=True)
@@ -1522,14 +1539,12 @@
                 self._search_op(api_obj, remote, 'unique_dir',
                     self.res_space_unique)
                 remote = True
-                time.sleep(1)
                 self._search_op(api_obj, remote, 'with', set())
                 self._search_op(api_obj, remote, 'with*',
                     self.res_space_with_star)
                 self._search_op(api_obj, remote, '*space',
                     self.res_space_space_star)
                 self._search_op(api_obj, remote, 'space', set())
-                time.sleep(1)
                 self.pkgsend_bulk(durl, self.space_pkg10, optional=False)
                 # Need to add install of subsequent package and
                 # local side search as well as remote
@@ -1725,11 +1740,8 @@
                             case_sensitive=False)
 
                 durl = self.dc.get_depot_url()
-                self.pkgsend_bulk(durl, self.cat_pkg10)
-                self.pkgsend_bulk(durl, self.cat2_pkg10)
-                self.pkgsend_bulk(durl, self.cat3_pkg10)
-                self.pkgsend_bulk(durl, self.bad_cat_pkg10)
-                self.pkgsend_bulk(durl, self.bad_cat2_pkg10)
+                self.pkgsend_bulk(durl, (self.cat_pkg10, self.cat2_pkg10,
+                    self.cat3_pkg10, self.bad_cat_pkg10, self.bad_cat2_pkg10))
                 api_obj = self.image_create(durl)
 
                 remote = True
@@ -1757,29 +1769,31 @@
                 """Checks whether incremental update generates wrong
                 additional lines."""
                 durl = self.dc.get_depot_url()
-                depotpath = self.dc.get_repodir()
-                ind_dir = os.path.join(depotpath, "index")
+                ind_dir = self._get_repo_index_dir()
                 tok_file = os.path.join(ind_dir, ss.BYTE_OFFSET_FILE)
                 main_file = os.path.join(ind_dir, ss.MAIN_FILE)
+
                 self.pkgsend_bulk(durl, self.example_pkg10)
-                time.sleep(2)
                 fh = open(tok_file)
                 tok_1 = fh.readlines()
                 tok_len = len(tok_1)
                 fh.close()
+
                 fh = open(main_file)
                 main_1 = fh.readlines()
                 main_len = len(main_1)
+
                 self.pkgsend_bulk(durl, self.example_pkg10, optional=False)
-                time.sleep(2)
                 fh = open(tok_file)
                 tok_2 = fh.readlines()
                 new_tok_len = len(tok_2)
                 fh.close()
+
                 fh = open(main_file)
                 main_2 = fh.readlines()
                 new_main_len = len(main_2)
                 fh.close()
+
                 # Since the server now adds a set action for the FMRI to
                 # manifests during publication, there should be one
                 # additional line for the token file.
@@ -1790,7 +1804,6 @@
                 """Test for known bug 983."""
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, self.bug_983_manifest)
-                time.sleep(2)
                 api_obj = self.image_create(durl)
 
                 self._search_op(api_obj, True, "gmake", self.res_bug_983)
@@ -1839,9 +1852,8 @@
                 """Tests that field queries and phrase queries work together.
                 """
                 durl = self.dc.get_depot_url()
-                self.pkgsend_bulk(durl, self.bug_8492_manf_1)
-                self.pkgsend_bulk(durl, self.bug_8492_manf_2)
-                time.sleep(2)
+                self.pkgsend_bulk(durl, (self.bug_8492_manf_1,
+                    self.bug_8492_manf_2))
                 api_obj = self.image_create(durl)
 
                 self._search_op(api_obj, True, "set::'image packaging'",
@@ -2046,9 +2058,6 @@
                 self.pkgsend_bulk(durl, self.hierarchical_named_pkg)
                 api_obj = self.image_create(durl)
 
-                # XXX wait for depot to be ready.
-                time.sleep(1)
-
                 remote = True
                 run_tests(api_obj, remote)
                 self._api_install(api_obj, ["pfoo"])
@@ -2064,6 +2073,12 @@
                 self.debug_features = ["headers"]
                 TestApiSearchBasics.setUp(self)
 
+        def pkgsend_bulk(self, durl, pkg):
+                # Ensures indexing is done for every pkgsend.
+                TestApiSearchBasics.pkgsend_bulk(self, durl, pkg,
+                    refresh_index=True)
+                self.wait_repo(self.dc.get_repodir())
+
         def test_local_image_update(self):
                 """Test that the index gets updated by image-update and
                 that rebuilding the index works after updating the
@@ -2086,50 +2101,10 @@
 
                 self._run_local_tests_example11_installed(api_obj)
 
-        def test_bug_4048_1(self):
-                """Checks whether the server deals with partial indexing."""
-                durl = self.dc.get_depot_url()
-                depotpath = self.dc.get_repodir()
-                tmp_dir = os.path.join(depotpath, "index", "TMP")
-                os.mkdir(tmp_dir)
-                self.pkgsend_bulk(durl, self.example_pkg10)
-                api_obj = self.image_create(durl)
-                self._run_remote_empty_tests(api_obj)
-                os.rmdir(tmp_dir)
-                offset = 2
-                depot_logfile = os.path.join(self.test_root,
-                    "depot_logfile%d" % offset)
-                tmp_dc = self.prep_depot(12000 + offset, depotpath,
-                    depot_logfile, refresh_index=True, start=True)
-                time.sleep(1)
-                # This should do something other than sleep for 1 sec
-                self._run_remote_tests(api_obj)
-                tmp_dc.kill()
-
-        def test_bug_4048_2(self):
-                """Checks whether the server deals with partial indexing."""
-                durl = self.dc.get_depot_url()
-                depotpath = self.dc.get_repodir()
-                tmp_dir = os.path.join(depotpath, "index", "TMP")
-                os.mkdir(tmp_dir)
-                self.pkgsend_bulk(durl, self.space_pkg10)
-                api_obj = self.image_create(durl)
-                self._run_remote_empty_tests(api_obj)
-                os.rmdir(tmp_dir)
-                self.pkgsend_bulk(durl, self.example_pkg10)
-                time.sleep(2)
-                self._run_remote_tests(api_obj)
-                self._search_op(api_obj, True, "unique_dir",
-                    self.res_space_unique)
-                self._search_op(api_obj, True, "with*",
-                    self.res_space_with_star)
-
         def test_bug_6177(self):
                 durl = self.dc.get_depot_url()
-                self.pkgsend_bulk(durl, self.example_pkg10)
-                self.pkgsend_bulk(durl, self.example_pkg11)
-                self.pkgsend_bulk(durl, self.incorp_pkg10)
-                self.pkgsend_bulk(durl, self.incorp_pkg11)
+                self.pkgsend_bulk(durl, (self.example_pkg10, self.example_pkg11,
+                    self.incorp_pkg10, self.incorp_pkg11))
                 api_obj = self.image_create(durl)
 
                 res_both_actions = set([
@@ -2239,43 +2214,44 @@
                 self._search_op(api_obj, True, "</bin>", res_both_packages,
                     return_actions=False, prune_versions=False)
 
-        def __corrupt_depot(self, ind_dir):
+        def __corrupt_depot(self, root):
                 self.dc.stop()
-                if os.path.exists(os.path.join(ind_dir, ss.MAIN_FILE)):
-                        shutil.move(os.path.join(ind_dir, ss.MAIN_FILE),
-                            os.path.join(ind_dir, "main_dict.ascii.v1"))
+                for entry in os.walk(root):
+                        dirpath, dirnames, fnames = entry
+                        if ss.MAIN_FILE in fnames:
+                                src = os.path.join(dirpath, ss.MAIN_FILE)
+                                dest = os.path.join(dirpath,
+                                    "main_dict.ascii.v1")
+                                self.debug("moving %s to %s" % (src, dest))
+                                shutil.move(src, dest)
                 self.dc.start()
 
-        def __wait_for_indexing(self, d):
-                init_time = time.time()
-                there = True
-                while there and ((time.time() - init_time) < 10):
-                        there = os.path.exists(d)
-                self.assert_(not there)
-                time.sleep(1)
-
         def test_bug_7358_1(self):
                 """Move files so that an inconsistent index is created and
                 check that the server rebuilds the index when possible, and
                 doesn't stack trace when it can't write to the directory."""
 
                 durl = self.dc.get_depot_url()
-                depotpath = self.dc.get_repodir()
-                ind_dir = os.path.join(depotpath, "index")
+                repo_path = self.dc.get_repodir()
+
                 api_obj = self.image_create(durl)
                 # Check when depot is empty.
-                self.__corrupt_depot(ind_dir)
-                self.__wait_for_indexing(os.path.join(ind_dir, "TMP"))
+                self.__corrupt_depot(repo_path)
+                repo = self.dc.get_repo() # Every time to ensure current state.
+                repo.refresh_index()
                 # Since the depot is empty, should return no results but
                 # not error.
                 self._search_op(api_obj, True, 'e*', set())
 
                 self.pkgsend_bulk(durl, self.example_pkg10)
-                self.__wait_for_indexing(os.path.join(ind_dir, "TMP"))
+                repo = self.dc.get_repo() # Every time to ensure current state.
+                repo.refresh_index()
+                self.dc.refresh()
 
                 # Check when depot contains a package.
-                self.__corrupt_depot(ind_dir)
-                self.__wait_for_indexing(os.path.join(ind_dir, "TMP"))
+                self.__corrupt_depot(repo_path)
+                repo = self.dc.get_repo() # Every time to ensure current state.
+                repo.refresh_index()
                 self._run_remote_tests(api_obj)
 
         def test_bug_7358_2(self):
@@ -2283,30 +2259,28 @@
                 with writable root."""
 
                 durl = self.dc.get_depot_url()
-                depotpath = self.dc.get_repodir()
-                ind_dir = os.path.join(depotpath, "index")
-                shutil.rmtree(ind_dir)
+                repo_path = self.dc.get_repodir()
+                ind_dir = self._get_repo_index_dir()
+                if os.path.exists(ind_dir):
+                        shutil.rmtree(ind_dir)
                 writable_root = os.path.join(self.test_root,
                     "writ_root")
-                writ_dir = os.path.join(writable_root, "index")
                 self.dc.set_writable_root(writable_root)
 
                 api_obj = self.image_create(durl)
 
                 # Check when depot is empty.
+                writ_dir = self._get_repo_writ_dir()
                 self.__corrupt_depot(writ_dir)
                 # Since the depot is empty, should return no results but
                 # not error.
                 self.assert_(not os.path.isdir(ind_dir))
-                self.__wait_for_indexing(os.path.join(writ_dir, "TMP"))
                 self._search_op(api_obj, True, 'e*', set())
 
                 self.pkgsend_bulk(durl, self.example_pkg10)
-                self.__wait_for_indexing(os.path.join(writ_dir, "TMP"))
 
                 # Check when depot contains a package.
                 self.__corrupt_depot(writ_dir)
-                self.__wait_for_indexing(os.path.join(writ_dir, "TMP"))
                 self.assert_(not os.path.isdir(ind_dir))
                 self._run_remote_tests(api_obj)
 
@@ -2318,9 +2292,6 @@
                 for p in api_obj.img.gen_publishers():
                         uuids.append(p.client_uuid)
 
-                # XXX wait for depot to be ready.
-                time.sleep(1)
-
                 self._search_op(api_obj, True, "example_path",
                     self.res_remote_path)
                 self._search_op(api_obj, True, "example_path",
@@ -2427,37 +2398,36 @@
                 frequently than they should."""
 
                 durl = self.dc.get_depot_url()
-                depotpath = self.dc.get_repodir()
-                ind_dir = os.path.join(depotpath, "index")
-                repo = srepo.Repository(repo_root=depotpath, read_only=True,
-                    fork_allowed=False, refresh_index=False)
+                ind_dir = self._get_repo_index_dir()
 
                 # Check that an empty index works correctly.
-                fmris = indexer.Indexer.check_for_updates(ind_dir, repo.catalog)
+                fmris = indexer.Indexer.check_for_updates(ind_dir,
+                    self._get_repo_catalog())
                 self.assertEqual(set(), fmris)
 
                 self.pkgsend_bulk(durl, self.example_pkg10)
-                self.__wait_for_indexing(os.path.join(ind_dir, "TMP"))
-                repo = srepo.Repository(repo_root=depotpath, fork_allowed=False)
-                self.assertEqual(len(set(repo.catalog.fmris())), 1)
+                cat = self._get_repo_catalog()
+                self.assertEqual(len(set(cat.fmris())), 1)
                 # Check that after publishing one package, no packages need
                 # indexing.
-                fmris = indexer.Indexer.check_for_updates(ind_dir, repo.catalog)
+                fmris = indexer.Indexer.check_for_updates(ind_dir,
+                    self._get_repo_catalog())
                 self.assertEqual(set(), fmris)
                 
                 back_dir = ind_dir + ".BACKUP"
                 shutil.copytree(ind_dir, back_dir)
                 self.pkgsend_bulk(durl, self.example_pkg10)
-                repo = srepo.Repository(repo_root=depotpath, fork_allowed=False)
-                self.assertEqual(len(set(repo.catalog.fmris())), 2)
+                cat = self._get_repo_catalog()
+                self.assertEqual(len(set(cat.fmris())), 2)
                 # Check that publishing a second package also works.
-                fmris = indexer.Indexer.check_for_updates(ind_dir, repo.catalog)
+                fmris = indexer.Indexer.check_for_updates(ind_dir,
+                    self._get_repo_catalog())
                 self.assertEqual(set(), fmris)
 
                 # Check that a package that was publisher but not index is
                 # reported.
                 fmris = indexer.Indexer.check_for_updates(back_dir,
-                    repo.catalog)
+                    self._get_repo_catalog())
                 self.assertEqual(len(fmris), 1)
 
 
@@ -2480,7 +2450,9 @@
                 self.durl1 = self.dcs[1].get_depot_url()
                 self.durl2 = self.dcs[2].get_depot_url()
                 self.durl3 = self.dcs[3].get_depot_url()
-                self.pkgsend_bulk(self.durl2, self.example_pkg10)
+                self.pkgsend_bulk(self.durl2, self.example_pkg10,
+                    refresh_index=True)
+                self.wait_repo(self.dcs[2].get_repodir())
 
                 self.image_create(self.durl1, prefix="test1")
                 self.pkg("set-publisher -O " + self.durl2 + " test2")
--- a/src/tests/api/t_file_manager.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/api/t_file_manager.py	Thu Aug 19 23:33:49 2010 -0700
@@ -101,10 +101,6 @@
                 t = tempfile.gettempdir()
                 no_dir = os.path.join(t, "not_exist")
 
-                self.check_exception(file_manager.FileManager,
-                    file_manager.NeedToModifyReadOnlyFileManager,
-                    ["create", no_dir], no_dir, readonly=True)
-
                 # Test that a read only FileManager won't modify the file
                 # system.
                 fm = file_manager.FileManager(self.base_dir, readonly=True)
--- a/src/tests/api/t_pkg_api_install.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/api/t_pkg_api_install.py	Thu Aug 19 23:33:49 2010 -0700
@@ -219,7 +219,8 @@
                 """ Send package [email protected], containing a directory and a file,
                     install, search, and uninstall. """
 
-                self.pkgsend_bulk(self.rurl, (self.foo10, self.foo11))
+                self.pkgsend_bulk(self.rurl, (self.foo10, self.foo11),
+                    refresh_index=True)
                 api_obj = self.image_create(self.rurl)
 
                 self.pkg("list -a")
@@ -230,8 +231,8 @@
 
                 self.pkg("search -l /lib/libc.so.1")
                 self.pkg("search -r /lib/libc.so.1")
-                self.pkg("search -l blah", exit = 1)
-                self.pkg("search -r blah", exit = 1)
+                self.pkg("search -l blah", exit=1)
+                self.pkg("search -r blah", exit=1)
 
                 # check to make sure timestamp was set to correct value
                 libc_path = os.path.join(self.get_img_path(), "lib/libc.so.1")
@@ -260,12 +261,12 @@
                 self.__do_install(api_obj, ["[email protected]"])
 
                 self.pkg("list [email protected]")
-                self.pkg("list [email protected]", exit = 1)
+                self.pkg("list [email protected]", exit=1)
 
                 api_obj.reset()
                 self.__do_install(api_obj, ["[email protected]"])
                 self.pkg("list [email protected]")
-                self.pkg("list [email protected]", exit = 1)
+                self.pkg("list [email protected]", exit=1)
                 self.pkg("list foo@1")
                 self.pkg("verify")
 
@@ -291,10 +292,28 @@
                 self.__do_uninstall(api_obj, ["bar", "foo"])
 
                 # foo and bar should not be installed at this point
-                self.pkg("list bar", exit = 1)
-                self.pkg("list foo", exit = 1)
+                self.pkg("list bar", exit=1)
+                self.pkg("list foo", exit=1)
                 self.pkg("verify")
 
+        def test_multi_publisher(self):
+                """ Verify that package install works as expected when multiple
+                publishers share the same repository. """
+
+                # Publish a package for 'test'.
+                self.pkgsend_bulk(self.rurl, self.bar10)
+
+                # Now change the default publisher to 'test2' and publish
+                # another package.
+                self.pkgrepo("set -s %s publisher/prefix=test2" % self.rurl)
+                self.pkgsend_bulk(self.rurl, self.foo10)
+
+                # Finally, create an image and verify that packages from
+                # both publishers may be installed.
+                api_obj = self.image_create(self.rurl, prefix=None)
+                self.__do_install(api_obj, ["pkg://test/[email protected]",
+                    "pkg://test2/[email protected]"])
+
         def test_pkg_file_errors(self):
                 """ Verify that package install and uninstall works as expected
                 when files or directories are missing. """
@@ -483,8 +502,8 @@
                 # be removed by this action.
                 self.__do_uninstall(api_obj, ["foo"], True)
 
-                self.pkg("list bar", exit = 1)
-                self.pkg("list foo", exit = 1)
+                self.pkg("list bar", exit=1)
+                self.pkg("list foo", exit=1)
 
         def test_nonrecursive_dependent_uninstall(self):
                 """Trying to remove a package that's a dependency of another
--- a/src/tests/cli/t_pkg_depotd.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkg_depotd.py	Thu Aug 19 23:33:49 2010 -0700
@@ -39,6 +39,7 @@
 import urllib2
 import urlparse
 
+import pkg.client.publisher as publisher
 import pkg.config as cfg
 import pkg.depotcontroller as dc
 import pkg.fmri as fmri
@@ -416,16 +417,24 @@
                 self.__dc.set_depotd_content_root(pkg5unittest.g_proto_area + \
                     "/usr/share/lib/pkg")
 
-                depotpath = os.path.join(self.test_root, "depot")
+                repopath = os.path.join(self.test_root, "repo")
                 logpath = os.path.join(self.test_root, self.id())
+                self.create_repo(repopath, properties={ "publisher": {
+                    "prefix": "test" }})
+                self.__dc.set_repodir(repopath)
+                self.__dc.set_logpath(logpath)
 
-                try:
-                        os.makedirs(depotpath, misc.PKG_DIR_MODE)
-                except:
-                        pass
+        def _get_repo_index_dir(self):
+                depotpath = self.__dc.get_repodir()
+                repo = self.__dc.get_repo()
+                rstore = repo.get_pub_rstore("test")
+                return rstore.index_root
 
-                self.__dc.set_repodir(depotpath)
-                self.__dc.set_logpath(logpath)
+        def _get_repo_writ_dir(self):
+                depotpath = self.__dc.get_repodir()
+                repo = self.__dc.get_repo()
+                rstore = repo.get_pub_rstore("test")
+                return rstore.writable_root
 
         def tearDown(self):
                 pkg5unittest.CliTestCase.tearDown(self)
@@ -454,38 +463,42 @@
                 self.make_misc_files(TestPkgDepot.misc_files)
                 writable_root = os.path.join(self.test_root,
                     "writ_root")
-                index_dir = os.path.join(writable_root, "index")
-                feed = os.path.join(writable_root, "feed.xml")
-                base_dir = os.path.join(self.test_root, "depot")
-                o_index_dir = os.path.join(base_dir, "index")
-                o_feed = os.path.join(base_dir, "feed.xml")
+                o_index_dir = os.path.join(self._get_repo_index_dir(), "index")
 
                 timeout = 10
 
                 def check_state(check_feed):
+                        index_dir = os.path.join(self._get_repo_writ_dir(),
+                            "index")
+                        feed = os.path.join(writable_root, "publisher", "test",
+                            "feed.xml")
                         found = not os.path.exists(o_index_dir) and \
-                            not os.path.exists(o_feed) and \
                             os.path.isdir(index_dir) and \
                             (not check_feed or os.path.isfile(feed))
                         start_time = time.time()
                         while not found and time.time() - start_time < timeout:
                                 time.sleep(1)
                                 found = not os.path.exists(o_index_dir) and \
-                                    not os.path.exists(o_feed) and \
                                     os.path.isdir(index_dir) and \
                                     (not check_feed or os.path.isfile(feed))
 
                         self.assert_(not os.path.exists(o_index_dir))
-                        self.assert_(not os.path.exists(o_feed))
                         self.assert_(os.path.isdir(index_dir))
                         if check_feed:
-                                self.assert_(os.path.isfile(feed))
-                def get_feed(durl):
+                                try:
+                                        self.assert_(os.path.isfile(feed))
+                                except:
+                                        raise RuntimeError("Feed cache file "
+                                            "not found at '%s'." % feed)
+                def get_feed(durl, pub=""):
                         start_time = time.time()
                         got = False
                         while not got and (time.time() - start_time) < timeout:
+                                if pub:
+                                        pub = "%s/" % pub
                                 try:
-                                        urllib2.urlopen("%s/feed" % durl)
+                                        urllib2.urlopen("%s%s/feed" % (durl,
+                                            pub))
                                         got = True
                                 except urllib2.HTTPError, e:
                                         self.debug(str(e))
@@ -493,17 +506,27 @@
                         self.assert_(got)
 
                 self.__dc.set_port(12000)
+                durl = self.__dc.get_depot_url()
+
+                repo = self.__dc.get_repo()
+                pub = repo.get_publisher("test")
+                pub_repo = pub.selected_repository
+                if not pub_repo:
+                        pub_repo = publisher.Repository()
+                        while pub.repositories:
+                                pub.repositories.pop()
+                        pub.repositories.append(pub_repo)
+                pub_repo.origins = [durl]
+                repo.update_publisher(pub)
+
                 self.__dc.set_writable_root(writable_root)
-                durl = self.__dc.get_depot_url()
                 self.__dc.set_property("publisher", "prefix", "test")
-                self.__dc.set_property("repository", "origins", durl)
                 self.__dc.start()
                 check_state(False)
-                self.pkgsend_bulk(durl, TestPkgDepot.quux10)
+                self.pkgsend_bulk(durl, TestPkgDepot.quux10, refresh_index=True)
                 get_feed(durl)
                 check_state(True)
 
-                self.__dc.wait_search()
                 self.image_create(durl)
                 self.pkg("search -r cat")
                 self.__dc.stop()
@@ -645,45 +668,38 @@
 
         repo_cfg = {
             "publisher": {
-                "alias": "pending",
                 "prefix": "org.opensolaris.pending"
             },
-            "repository": {
-                "collection_type": "supplemental",
-                "description":
-                    "Development packages for the contrib repository.",
-                "legal_uris": [
-                    "http://www.opensolaris.org/os/copyrights",
-                    "http://www.opensolaris.org/os/tou",
-                    "http://www.opensolaris.org/os/trademark"
-                ],
-                "mirrors": [],
-                "name": """"Pending" Repository""",
-                "origins": [],  # Has to be set during setUp for correct origin.
-                "refresh_seconds": 86400,
-                "registration_uri": "",
-                "related_uris": [
-                    "http://jucr.opensolaris.org/contrib",
-                    "http://jucr.opensolaris.org/pending",
-                    "http://pkg.opensolaris.org/contrib",
-                ]
-            }
+        }
+
+        pub_repo_cfg = {
+            "collection_type": "supplemental",
+            "description":
+                "Development packages for the contrib repository.",
+            "legal_uris": [
+                "http://www.opensolaris.org/os/copyrights",
+                "http://www.opensolaris.org/os/tou",
+                "http://www.opensolaris.org/os/trademark"
+            ],
+            "mirrors": [],
+            "name": """"Pending" Repository""",
+            "origins": [],  # Has to be set during setUp for correct origin.
+            "refresh_seconds": 86400,
+            "registration_uri": "",
+            "related_uris": [
+                "http://jucr.opensolaris.org/contrib",
+                "http://jucr.opensolaris.org/pending",
+                "http://pkg.opensolaris.org/contrib",
+            ]
         }
 
         def setUp(self):
-                pkg5unittest.SingleDepotTestCase.setUp(self, start_depot=True)
-
-                # All of the tests will start depot if needed.
-                self.dc.stop()
+                pkg5unittest.SingleDepotTestCase.setUp(self)
 
                 # Prevent override of custom configuration;
                 # tests will set as needed.
                 self.dc.clear_property("publisher", "prefix")
 
-                # Set repository origins.
-                self.repo_cfg["repository"]["origins"] = \
-                    [self.dc.get_depot_url()]
-
                 self.tpath = tempfile.mkdtemp(prefix="tpath",
                     dir=self.test_root)
 
@@ -693,7 +709,8 @@
                 """Verify that a non-error response and valid HTML is returned
                 for each known BUI page in every available depot mode."""
 
-                self.dc.set_property("publisher", "prefix", "test")
+                pub = "test"
+                self.dc.set_property("publisher", "prefix", pub)
 
                 # A list of tuples containing the name of the method used to set
                 # the mode, and then the method needed to unset that mode.
@@ -712,15 +729,19 @@
                     "en/stats.shtml",
                 ]
 
+                repodir = self.dc.get_repodir()
+                durl = self.dc.get_depot_url()
                 for with_packages in (False, True):
-                        shutil.rmtree(self.dc.get_repodir(),
-                            ignore_errors=True)
+                        shutil.rmtree(repodir, ignore_errors=True)
+
+                        # Create repository and set publisher origins.
+                        self.create_repo(self.dc.get_repodir())
+                        self.pkgrepo("set -s %(repodir)s -p %(pub)s "
+                            "repository/origins=%(durl)s" % locals())
 
                         if with_packages:
                                 self.dc.set_readwrite()
-                                self.dc.set_port(12000)
                                 self.dc.start()
-                                durl = self.dc.get_depot_url()
                                 self.pkgsend_bulk(durl, (self.info10,
                                     self.quux10, self.system10))
                                 self.dc.stop()
@@ -729,10 +750,7 @@
                                 if set_method:
                                         getattr(self.dc, set_method)()
 
-                                self.dc.set_port(12000)
                                 self.dc.start()
-                                durl = self.dc.get_depot_url()
-
                                 for path in pages:
                                         # Any error responses will cause an
                                         # exception.
@@ -755,6 +773,7 @@
                 """Helper function to generate test repository configuration."""
                 # Find and load the repository configuration.
                 rpath = self.dc.get_repodir()
+                assert os.path.isdir(rpath)
                 rcpath = os.path.join(rpath, "cfg_cache")
 
                 rc = sr.RepositoryConfig(target=rcpath)
@@ -769,6 +788,25 @@
                 # Save it.
                 rc.write()
 
+                # Apply publisher properties and update.
+                repo = self.dc.get_repo()
+                try:
+                        pub = repo.get_publisher("org.opensolaris.pending")
+                except sr.RepositoryUnknownPublisher:
+                        pub = publisher.Publisher("org.opensolaris.pending")
+                        repo.add_publisher(pub)
+
+                pub_repo = pub.selected_repository
+                if not pub_repo:
+                        pub_repo = publisher.Repository()
+                        while pub.repositories:
+                                pub.repositories.pop()
+                        pub.repositories.append(pub_repo)
+
+                for attr, val in self.pub_repo_cfg.iteritems():
+                        setattr(pub_repo, attr, val)
+                repo.update_publisher(pub)
+
         def test_1_depot_publisher(self):
                 """Verify the output of the depot /publisher operation."""
 
@@ -781,18 +819,20 @@
 
                 durl = self.dc.get_depot_url()
                 purl = urlparse.urljoin(durl, "publisher/0")
-                pub, pkglist = p5i.parse(location=purl)[0]
+                entries = p5i.parse(location=purl)
+                assert entries[0][0].prefix == "test"
+                assert entries[1][0].prefix == "org.opensolaris.pending"
 
                 # Now verify that the parsed response has the expected data.
+                pub, pkglist = entries[-1]
                 cfgdata = self.repo_cfg
                 for prop in cfgdata["publisher"]:
                         self.assertEqual(getattr(pub, prop),
                             cfgdata["publisher"][prop])
 
                 repo = pub.selected_repository
-                for prop in cfgdata["repository"]:
+                for prop, expected in self.pub_repo_cfg.iteritems():
                         returned = getattr(repo, prop)
-                        expected = cfgdata["repository"][prop]
                         if prop.endswith("uris") or prop == "origins":
                                 uris = []
                                 for u in returned:
@@ -865,8 +905,8 @@
                 self.dc.start()
 
                 durl = self.dc.get_depot_url()
-                pfmri = fmri.PkgFmri(self.pkgsend_bulk(durl, self.file10)[0],
-                    "5.11")
+                pfmri = fmri.PkgFmri(self.pkgsend_bulk(durl, self.file10,
+                    refresh_index=True)[0], "5.11")
 
                 def get_headers(req_path):
                         try:
@@ -912,9 +952,8 @@
 
                 # Then, publish some packages we can abuse for testing.
                 durl = self.dc.get_depot_url()
-                plist = self.pkgsend_bulk(durl, self.quux10)
+                plist = self.pkgsend_bulk(durl, self.quux10, refresh_index=True)
 
-                self.dc.wait_search()
                 surl = urlparse.urljoin(durl,
                     "en/search.shtml?action=Search&token=*")
                 urllib2.urlopen(surl).read()
--- a/src/tests/cli/t_pkg_history.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkg_history.py	Thu Aug 19 23:33:49 2010 -0700
@@ -62,7 +62,7 @@
                 # as those in the first ... by duplicating the repo.
                 d1dir = self.dcs[1].get_repodir()
                 d2dir = self.dcs[2].get_repodir()
-                self.copy_repository(d1dir, "test1", d2dir, "test2")
+                self.copy_repository(d1dir, d2dir, { "test1": "test2" })
                 self.dcs[2].get_repo(auto_create=True).rebuild()
 
                 self.image_create(rurl1, prefix="test1")
--- a/src/tests/cli/t_pkg_image_update.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkg_image_update.py	Thu Aug 19 23:33:49 2010 -0700
@@ -118,8 +118,9 @@
 
                 # Copy contents of repository 2 to repos 4 and 5.
                 for i in (4, 5):
-                        self.copy_repository(self.dcs[2].get_repodir(), "test1",
-                                self.dcs[i].get_repodir(), "test%d" % i)
+                        self.copy_repository(self.dcs[2].get_repodir(),
+                                self.dcs[i].get_repodir(),
+                                { "test1": "test%d" % i })
                         self.dcs[i].get_repo(auto_create=True).rebuild()
 
         def test_image_update_bad_opts(self):
--- a/src/tests/cli/t_pkg_install.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkg_install.py	Thu Aug 19 23:33:49 2010 -0700
@@ -175,7 +175,6 @@
             add file tmp/cat mode=0555 owner=root group=bin path=/bin/cat
             close """
 
-
         misc_files = [ "tmp/libc.so.1", "tmp/cat", "tmp/baz" ]
 
         def setUp(self):
@@ -218,7 +217,8 @@
                 """ Send package [email protected], containing a directory and a file,
                     install, search, and uninstall. """
 
-                self.pkgsend_bulk(self.rurl, (self.foo10, self.foo11))
+                self.pkgsend_bulk(self.rurl, (self.foo10, self.foo11),
+                    refresh_index=True)
                 self.image_create(self.rurl)
 
                 self.pkg("list -a")
@@ -3161,7 +3161,7 @@
                 # Copy contents of test1's repo to a repo for test3.
                 d1dir = self.dcs[1].get_repodir()
                 d2dir = self.dcs[7].get_repodir()
-                self.copy_repository(d1dir, "test1", d2dir, "test3")
+                self.copy_repository(d1dir, d2dir, { "test1": "test3" })
                 self.dcs[7].get_repo(auto_create=True).rebuild()
 
                 # Create image and hence primary publisher
@@ -4932,7 +4932,7 @@
                 with open(mpath, "rb") as mfile:
                         mcontent = mfile.read()
 
-                cat = repo.catalog
+                cat = repo.get_catalog("test")
                 cat.log_updates = False
 
                 # Update the catalog signature.
--- a/src/tests/cli/t_pkg_list.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkg_list.py	Thu Aug 19 23:33:49 2010 -0700
@@ -81,7 +81,7 @@
                 # send those changes after restarting depot 2.
                 d1dir = self.dcs[1].get_repodir()
                 d2dir = self.dcs[2].get_repodir()
-                self.copy_repository(d1dir, "test1", d2dir, "test2")
+                self.copy_repository(d1dir, d2dir, { "test1": "test2" })
 
                 # The new repository won't have a catalog, so rebuild it.
                 self.dcs[2].get_repo(auto_create=True).rebuild()
@@ -299,22 +299,6 @@
                 # packages in the specified repository are for publisher test2.
                 self.pkg("unset-publisher test2")
 
-                # A refresh has to be prevented here as set-publisher will not
-                # allow the provided repository URI as it is for a different
-                # publisher.  However, since this test is checking for the case
-                # where a different publisher's data is now being used for
-                # a publisher, this can be worked around.
-                self.pkg("set-publisher --no-refresh -O %s test3" % self.rurl2)
-                self.pkg("refresh test3")
-                self.pkg("list -aHf [email protected]")
-                expected = \
-                    "foo 1.0-0 known u----\n" + \
-                    "foo (test2) 1.0-0 installed u----\n"
-                output = self.reduceSpaces(self.output)
-                self.assertEqualDiff(expected, output)
-                self.pkg("unset-publisher test3")
-                self.pkg("set-publisher -O %s test2" % self.rurl2)
-
                 # Uninstall the package so any remaining tests won't be
                 # impacted.
                 self.pkg("uninstall pkg://test2/[email protected]")
@@ -542,7 +526,7 @@
                 with open(mpath, "rb") as mfile:
                         mcontent = mfile.read()
 
-                cat = repo.catalog
+                cat = repo.get_catalog("test")
                 cat.log_updates = False
 
                 # Update the catalog signature.
--- a/src/tests/cli/t_pkg_publisher.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkg_publisher.py	Thu Aug 19 23:33:49 2010 -0700
@@ -395,15 +395,19 @@
             "repository": {
                 "collection_type": "supplemental",
                 "description": "This repository serves packages for test3.",
-                "legal_uris": "http://www.opensolaris.org/os/copyrights,"
-                        "http://www.opensolaris.org/os/tou,"
-                        "http://www.opensolaris.org/os/trademark",
+                "legal_uris": [
+                    "http://www.opensolaris.org/os/copyrights",
+                    "http://www.opensolaris.org/os/tou",
+                    "http://www.opensolaris.org/os/trademark"
+                ],
                 "name": "The Test3 Repository",
                 "refresh_seconds": 86400,
                 "registration_uri": "",
-                "related_uris": "http://pkg.opensolaris.org/contrib,"
-                        "http://jucr.opensolaris.org/pending,"
-                        "http://jucr.opensolaris.org/contrib",
+                "related_uris": [
+                    "http://pkg.opensolaris.org/contrib",
+                    "http://jucr.opensolaris.org/pending",
+                    "http://jucr.opensolaris.org/contrib"
+                ],
             },
         }
 
@@ -511,7 +515,7 @@
                                         if not val:
                                                 val = set()
                                         else:
-                                                val = set(val.split(","))
+                                                val = set(val)
                                         new_pub_val = set()
                                         for u in pub_val:
                                                 uri = u.uri
@@ -521,13 +525,32 @@
                                         pub_val = new_pub_val
                                 self.assertEqual(val, pub_val)
 
+        def __update_repo_pub_cfg(self, dc, pubcfg):
+                """Private helper method to update a repository's publisher
+                configuration based on the provided dictionary structure."""
+
+                rpath = dc.get_repodir()
+                props = ""
+                for sname in pubcfg:
+                        for pname, pval in pubcfg[sname].iteritems():
+                                if sname == "publisher" and pname == "prefix":
+                                        continue
+                                pname = pname.replace("_", "-")
+                                if isinstance(pval, list):
+                                        props += "%s/%s='(%s)' " % \
+                                            (sname, pname, " ".join(pval))
+                                else:
+                                        props += "%s/%s='%s' " % \
+                                            (sname, pname, pval)
+
+                pfx = pubcfg["publisher"]["prefix"]
+                self.pkgrepo("set -s %s -p %s %s" % (rpath, pfx, props))
+                self.pkgrepo("get -p all -s %s" % rpath)
+
         def test_set_auto(self):
                 """Verify that set-publisher -p works as expected."""
 
-                # XXX can't test multiple publisher configuration case as
-                # depot doesn't support that yet (i.e. publisher/0 response
-                # does not contain multiple publishers).  So this only tests
-                # the single add/update case for the moment.
+                # Test the single add/update case first.
                 durl1 = self.dcs[1].get_depot_url()
                 durl3 = self.dcs[3].get_depot_url()
                 durl4 = self.dcs[4].get_depot_url()
@@ -550,7 +573,7 @@
                         "prefix": "test3",
                     },
                     "repository": {
-                        "origins": durl3,
+                        "origins": [durl3],
                     },
                 }
                 self.pkg("set-publisher -p %s" % durl3)
@@ -566,11 +589,9 @@
                 # Origin and mirror info wasn't known until this point, so add
                 # it to the test configuration.
                 t3cfg = self.test3_pub_cfg.copy()
-                t3cfg["repository"]["origins"] = durl3
-                t3cfg["repository"]["mirrors"] = ",".join((durl1, durl3, durl4))
-                for section in t3cfg:
-                        for prop, val in t3cfg[section].iteritems():
-                                self.dcs[3].set_property(section, prop, val)
+                t3cfg["repository"]["origins"] = [durl3]
+                t3cfg["repository"]["mirrors"] = [durl1, durl3, durl4]
+                self.__update_repo_pub_cfg(self.dcs[3], t3cfg)
                 self.dcs[3].start()
 
                 # Should succeed and configure test3 publisher.
@@ -598,12 +619,9 @@
                                         # Clear all other props.
                                         val = ""
                                 t6cfg[section][prop] = val
-                t6cfg["repository"]["origins"] = ",".join((durl3, durl6))
-                t6cfg["repository"]["mirrors"] = ",".join((durl1, durl3, durl4,
-                    durl6))
-                for section in t6cfg:
-                        for prop, val in t6cfg[section].iteritems():
-                                self.dcs[6].set_property(section, prop, val)
+                t6cfg["repository"]["origins"] = [durl3, durl6]
+                t6cfg["repository"]["mirrors"] = [durl1, durl3, durl4, durl6]
+                self.__update_repo_pub_cfg(self.dcs[6], t6cfg)
                 self.dcs[6].start()
 
                 # Should fail since even though repository publisher prefix
@@ -619,6 +637,14 @@
                 # as expected.
                 self.__verify_pub_cfg("test3", t6cfg)
 
+                # Test multi-publisher add case.
+                self.pkgrepo("set -s %s -p test2 publisher/alias=''" %
+                    self.dcs[6].get_repodir())
+                self.pkg("unset-publisher test3")
+                self.dcs[6].refresh()
+                self.pkg("set-publisher -p %s" % durl6)
+                self.pkg("publisher test3 test2")
+
         def test_set_mirrors_origins(self):
                 """Test set-publisher functionality for mirrors and origins."""
 
--- a/src/tests/cli/t_pkg_refresh.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkg_refresh.py	Thu Aug 19 23:33:49 2010 -0700
@@ -390,8 +390,8 @@
 
                 # A bit hacky, but load the repository's catalog directly
                 # and then get the list of updates files it has created.
-                cat_root = os.path.join(dc.get_repodir(), "catalog")
-                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+                repo = dc.get_repo()
+                v1_cat = repo.get_catalog("test1")
                 update = v1_cat.updates.keys()[-1]
 
                 # All of the entries from the previous operations, and then
@@ -432,7 +432,8 @@
                 # of the log so far, so expected needs to be reset and the
                 # catalog reloaded.
                 expected = []
-                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+                repo = dc.get_repo()
+                v1_cat = repo.get_catalog("test1")
 
                 dc.stop()
                 dc.set_rebuild()
@@ -469,7 +470,8 @@
                     "/catalog/1/catalog.attrs",
                     "/catalog/1/%s" % update
                 ]
-                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+                repo = dc.get_repo()
+                v1_cat = repo.get_catalog("test1")
 
                 # Stop the depot server and put the old repository data back.
                 dc.stop()
@@ -477,7 +479,8 @@
                 shutil.move(trpath, dc.get_repodir())
                 dc.start()
                 expected = []
-                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+                repo = dc.get_repo()
+                v1_cat = repo.get_catalog("test1")
 
                 # Now verify that a refresh induces a full retrieval.  The
                 # catalog.attrs file will be retrieved twice due to the
@@ -501,7 +504,8 @@
 
                 # Publish a new package.
                 self.pkgsend_bulk(self.durl1, self.foo12)
-                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+                repo = dc.get_repo()
+                v1_cat = repo.get_catalog("test1")
 
                 # Refresh to get an incremental update, and verify it worked.
                 self.pkg("refresh")
@@ -510,7 +514,8 @@
                     "/catalog/1/catalog.attrs",
                     "/catalog/1/%s" % update
                 ]
-                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+                repo = dc.get_repo()
+                v1_cat = repo.get_catalog("test1")
 
                 # Stop the depot server and put the old repository data back.
                 dc.stop()
@@ -523,7 +528,8 @@
                 # entry to exist, but at a different point in time in the
                 # update logs.
                 self.pkgsend_bulk(self.durl1, self.foo12)
-                v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
+                repo = dc.get_repo()
+                v1_cat = repo.get_catalog("test1")
                 update = v1_cat.updates.keys()[-1]
 
                 # Now verify that a refresh induces a full retrieval.  The
@@ -551,6 +557,7 @@
                 dc.start()
 
                 old_cat = os.path.join(self.test_root, "old-catalog")
+                cat_root = v1_cat.meta_root
                 shutil.copytree(v1_cat.meta_root, old_cat)
                 self.pkgsend_bulk(self.durl1, self.foo121)
                 v1_cat = catalog.Catalog(meta_root=cat_root, read_only=True)
@@ -606,7 +613,7 @@
                 # Next, purposefully corrupt the catalog.attrs file in the
                 # repository and attempt a refresh.  The client should fail
                 # gracefully.
-                f = open(os.path.join(cat_root, "catalog.attrs"), "wb")
+                f = open(os.path.join(v1_cat.meta_root, "catalog.attrs"), "wb")
                 f.write("INVALID")
                 f.close()
                 self.pkg("refresh", exit=1)
--- a/src/tests/cli/t_pkg_search.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkg_search.py	Thu Aug 19 23:33:49 2010 -0700
@@ -374,7 +374,7 @@
                 # the download directory doesn't exist.
                 cache_dir = os.path.join(self.img_path, "var", "pkg",
                     "download")
-                shutil.rmtree(cache_dir)
+                shutil.rmtree(cache_dir, ignore_errors=True)
                 self.assertFalse(os.path.exists(cache_dir))
                 self._search_op(True, "fo*", self.res_remote_foo, su_wrap=True)
 
@@ -482,6 +482,12 @@
                 index_dir_tmp = index_dir + "TMP"
                 return index_dir, index_dir_tmp
 
+        def pkgsend_bulk(self, durl, pkg):
+                # Ensure indexing is performed for every published package.
+                pkg5unittest.SingleDepotTestCase.pkgsend_bulk(self, durl, pkg,
+                    refresh_index=True)
+                self.wait_repo(self.dc.get_repodir())
+
 	def test_pkg_search_cli(self):
 		"""Test search cli options."""
 
@@ -508,7 +514,8 @@
                 self.pkg("search -o search.match -a '<example_path>'", exit=2)
                 self.pkg("search -o search.match_type -p pkg", exit=2)
                 self.pkg("search -o search.match_type -a '<pkg>'", exit=1)
-                self.pkg("search -o search.match_type -a '<example_path>'", exit=2)
+                self.pkg("search -o search.match_type -a '<example_path>'",
+                    exit=2)
                 self.pkg("search -o action.foo -a pkg", exit=2)
 
         def test_remote(self):
@@ -601,9 +608,8 @@
                 expected."""
 
                 durl = self.dc.get_depot_url()
-                self.pkgsend_bulk(durl, self.example_pkg10)
-                self.pkgsend_bulk(durl, self.example_pkg11)
-                self.pkgsend_bulk(durl, self.incorp_pkg10)
+                self.pkgsend_bulk(durl, (self.example_pkg10, self.example_pkg11,
+                    self.incorp_pkg10))
 
                 self.image_create(durl)
 
@@ -641,8 +647,7 @@
                 # This test can't be moved to t_api_search until bug 8497 has
                 # been resolved.
                 durl = self.dc.get_depot_url()
-                self.pkgsend_bulk(durl, self.fat_pkg10)
-                self.pkgsend_bulk(durl, self.example_pkg10)
+                self.pkgsend_bulk(durl, (self.fat_pkg10, self.example_pkg10))
 
                 self.image_create(durl)
 
--- a/src/tests/cli/t_pkgdep.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkgdep.py	Thu Aug 19 23:33:49 2010 -0700
@@ -692,8 +692,8 @@
                 expected = set(expected.splitlines())
                 seen_but_not_expected = self.__compare_res(seen, expected)
                 expected_but_not_seen = self.__compare_res(expected, seen)
-                self.assertEqualDiff(seen_but_not_expected,
-                    expected_but_not_seen)
+                self.assertEqualDiff(expected_but_not_seen,
+                    seen_but_not_expected)
 
         def test_opts(self):
                 """Ensure that incorrect arguments don't cause a traceback."""
--- a/src/tests/cli/t_pkgdep_resolve.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkgdep_resolve.py	Thu Aug 19 23:33:49 2010 -0700
@@ -38,7 +38,7 @@
 import pkg.publish.dependencies as dependencies
 from pkg.fmri import PkgFmri
 
-API_VERSION = 40
+API_VERSION = 42
 PKG_CLIENT_NAME = "pkg"
 
 
--- a/src/tests/cli/t_pkgrecv.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkgrecv.py	Thu Aug 19 23:33:49 2010 -0700
@@ -139,8 +139,7 @@
                 path = urllib.url2pathname(parts[2])
 
                 try:
-                        return repo.Repository(auto_create=False,
-                            fork_allowed=False, repo_root=path)
+                        return repo.Repository(root=path)
                 except cfg.ConfigError, e:
                         raise repo.RepositoryError(_("The specified "
                             "repository's configuration data is not "
--- a/src/tests/cli/t_pkgrepo.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkgrepo.py	Thu Aug 19 23:33:49 2010 -0700
@@ -32,9 +32,10 @@
 from pkg.server.query_parser import Query
 import os
 import pkg
+import pkg.depotcontroller as dc
 import pkg.fmri as fmri
 import pkg.misc as misc
-import pkg.search_errors as se
+import pkg.server.repository as sr
 import shutil
 import tempfile
 import time
@@ -42,25 +43,25 @@
 import urlparse
 import unittest
 
-class TestPkgRepo(pkg5unittest.CliTestCase):
+class TestPkgRepo(pkg5unittest.SingleDepotTestCase):
         # Cleanup after every test.
         persistent_setup = False
 
         tree10 = """
             open [email protected],5.11-0
-            close 
+            close
         """
 
         amber10 = """
             open [email protected],5.11-0
             add depend fmri=pkg:/[email protected] type=require
-            close 
+            close
         """
 
         amber20 = """
             open [email protected],5.11-0
             add depend fmri=pkg:/[email protected] type=require
-            close 
+            close
         """
 
         truck10 = """
@@ -77,17 +78,16 @@
             add file tmp/truck1 mode=0444 owner=root group=bin path=/etc/truck1
             add file tmp/truck2 mode=0444 owner=root group=bin path=/etc/truck2
             add depend fmri=pkg:/[email protected] type=require
-            close 
+            close
         """
 
         zoo10 = """
             open [email protected],5.11-0
-            close 
+            close
         """
 
         def setUp(self):
-                pkg5unittest.CliTestCase.setUp(self)
-
+                pkg5unittest.SingleDepotTestCase.setUp(self)
                 self.make_misc_files(["tmp/empty", "tmp/truck1",
                     "tmp/truck2"])
 
@@ -120,11 +120,9 @@
                 # global option with no subcommand should exit with 2.
                 self.pkgrepo("-s %s" % self.test_root, exit=2)
 
-                # Verify an invalid URI causes an exit.  (For the moment,
-                # only the file scheme is supported.)
-                for baduri in ("file://not/valid", "http://localhost",
-                    "http://not$valid"):
-                        self.pkgrepo("-s %s" % baduri, exit=1)
+                # Verify an invalid URI causes an exit 2.
+                for baduri in ("file://not/valid", "http://not@$$_-^valid"):
+                        self.pkgrepo("info -s %s" % baduri, exit=2)
 
         def test_01_create(self):
                 """Verify pkgrepo create works as expected."""
@@ -132,23 +130,22 @@
                 # Verify create without a destination exits.
                 self.pkgrepo("create", exit=2)
 
-                # Verify create with an invalid URI exits.  (For the moment,
-                # only the file scheme is supported.)
-                for baduri in ("file://not/valid", "http://localhost",
-                    "http://not$valid"):
-                        self.pkgrepo("create %s" % baduri, exit=1)
+                # Verify create with an invalid URI as an operand exits with 2.
+                for baduri in ("file://not/valid", "http://not@$$_-^valid"):
+                        self.pkgrepo("create %s" % baduri, exit=2)
 
                 # Verify create works whether -s is used to supply the location
                 # of the new repository or it is passed as an operand.  Also
                 # verify that either a path or URI can be used to provide the
                 # repository's location.
-                repo_path = os.path.join(self.test_root, "repo")
-                repo_uri = "file:%s" % repo_path
+                repo_path = self.dc.get_repodir()
+                shutil.rmtree(repo_path)
+                repo_uri = self.dc.get_repo_url()
 
                 # Specify using global option and path.
-                self.pkgrepo("-s %s create" % repo_path)
+                self.pkgrepo("create -s %s" % repo_path)
                 # This will fail if a repository wasn't created.
-                self.get_repo(repo_path)
+                self.dc.get_repo()
                 shutil.rmtree(repo_path)
 
                 # Specify using operand and URI.
@@ -157,276 +154,592 @@
                 self.get_repo(repo_path)
                 shutil.rmtree(repo_path)
 
-        def test_02_property(self):
-                """Verify pkgrepo property and set-property works as expected.
-                """
+        def test_02_get_set_property(self):
+                """Verify pkgrepo get and set works as expected."""
 
                 # Verify command without a repository exits.
-                self.pkgrepo("property", exit=2)
-
-                # Create a repository.
-                repo_path = os.path.join(self.test_root, "repo")
-                repo_uri = "file:%s" % repo_path
-                self.assert_(not os.path.exists(repo_path))
-                self.pkgrepo("-s %s create" % repo_path)
+                self.pkgrepo("get", exit=2)
 
-                # Verify property handles unknown properties gracefully.
-                self.pkgrepo("-s %s property repository/unknown" % repo_uri,
-                    exit=1)
+                # Create a repository (a version 3 one is needed for these
+                # tests).
+                repo_path = self.dc.get_repodir()
+                repo_uri = self.dc.get_repo_url()
+                depot_uri = self.dc.get_depot_url()
+                shutil.rmtree(repo_path)
+                self.assert_(not os.path.exists(repo_path))
+                self.pkgrepo("create -s %s --version=3" % repo_path)
 
-                # Verify property returns partial failure if only some
+                # Verify get handles unknown properties gracefully.
+                self.pkgrepo("get -s %s repository/unknown" % repo_uri, exit=1)
+
+                # Verify get returns partial failure if only some
                 # properties cannot be found.
-                self.pkgrepo("-s %s property repository/origins "
+                self.pkgrepo("get -s %s repository/origins "
                     "repository/unknown" % repo_uri, exit=3)
 
-                # Verify full default output.
-                self.pkgrepo("-s %s property" % repo_uri)
-                expected = """\
+                # Verify full default output for both network and file case.
+                self.dc.start()
+                for uri in (repo_uri, depot_uri):
+                        self.pkgrepo("get -s %s" % uri)
+                        expected = """\
 SECTION    PROPERTY           VALUE
-feed       description        
+feed       description        ""
 feed       icon               web/_themes/pkg-block-icon.png
-feed       id                 
+feed       id                 ""
 feed       logo               web/_themes/pkg-block-logo.png
-feed       name               package repository feed
+feed       name               package\ repository\ feed
 feed       window             24
-publisher  alias              
-publisher  intermediate_certs []
-publisher  prefix             
-publisher  signing_ca_certs   []
+publisher  alias              ""
+publisher  intermediate_certs ()
+publisher  prefix             test
+publisher  signing_ca_certs   ()
 repository collection_type    core
-repository description        
-repository detailed_url       
-repository legal_uris         []
-repository maintainer         
-repository maintainer_url     
-repository mirrors            []
-repository name               package repository
-repository origins            []
+repository description        ""
+repository detailed_url       ""
+repository legal_uris         ()
+repository maintainer         ""
+repository maintainer_url     ""
+repository mirrors            ()
+repository name               package\ repository
+repository origins            ()
 repository refresh_seconds    14400
-repository registration_uri   
-repository related_uris       []
+repository registration_uri   ""
+repository related_uris       ()
+repository version            3
 """
-                self.assertEqualDiff(expected, self.output)
+                        self.assertEqualDiff(expected, self.output)
+                self.dc.stop()
 
                 # Verify full tsv output.
-                self.pkgrepo("-s %s property -Ftsv" % repo_uri)
+                self.pkgrepo("get -s %s -Ftsv" % repo_uri)
                 expected = """\
 SECTION\tPROPERTY\tVALUE
-feed\tdescription\t
+feed\tdescription\t""
 feed\ticon\tweb/_themes/pkg-block-icon.png
-feed\tid\t
+feed\tid\t""
 feed\tlogo\tweb/_themes/pkg-block-logo.png
-feed\tname\tpackage repository feed
+feed\tname\tpackage\ repository\ feed
 feed\twindow\t24
-publisher\talias\t
-publisher\tintermediate_certs\t[]
-publisher\tprefix\t
-publisher\tsigning_ca_certs\t[]
+publisher\talias\t""
+publisher\tintermediate_certs\t()
+publisher\tprefix\ttest
+publisher\tsigning_ca_certs\t()
 repository\tcollection_type\tcore
-repository\tdescription\t
-repository\tdetailed_url\t
-repository\tlegal_uris\t[]
-repository\tmaintainer\t
-repository\tmaintainer_url\t
-repository\tmirrors\t[]
-repository\tname\tpackage repository
-repository\torigins\t[]
+repository\tdescription\t""
+repository\tdetailed_url\t""
+repository\tlegal_uris\t()
+repository\tmaintainer\t""
+repository\tmaintainer_url\t""
+repository\tmirrors\t()
+repository\tname\tpackage\ repository
+repository\torigins\t()
 repository\trefresh_seconds\t14400
-repository\tregistration_uri\t
-repository\trelated_uris\t[]
+repository\tregistration_uri\t""
+repository\trelated_uris\t()
+repository\tversion\t3
 """
                 self.assertEqualDiff(expected, self.output)
 
                 # Verify that -H omits headers for full output.
-                self.pkgrepo("-s %s property -H" % repo_uri)
+                self.pkgrepo("get -s %s -H" % repo_uri)
                 self.assert_(self.output.find("SECTION") == -1)
 
-                # Verify specific property default output and that
-                # -H omits headers for specific property output.
-                self.pkgrepo("-s %s property publisher/prefix" %
+                # Verify specific get default output and that
+                # -H omits headers for specific get output.
+                self.pkgrepo("get -s %s publisher/prefix" %
                     repo_uri)
                 expected = """\
 SECTION    PROPERTY           VALUE
-publisher  prefix             
+publisher  prefix             test
 """
                 self.assertEqualDiff(expected, self.output)
 
-                
-                self.pkgrepo("-s %s property -H publisher/prefix "
+                self.pkgrepo("get -s %s -H publisher/prefix "
                     "repository/origins" % repo_uri)
                 expected = """\
-publisher  prefix             
-repository origins            []
+publisher  prefix             test
+repository origins            ()
 """
                 self.assertEqualDiff(expected, self.output)
 
-                # Verify specific property tsv output.
-                self.pkgrepo("-s %s property -F tsv publisher/prefix" %
+                # Verify specific get tsv output.
+                self.pkgrepo("get -s %s -F tsv publisher/prefix" %
                     repo_uri)
                 expected = """\
 SECTION\tPROPERTY\tVALUE
-publisher\tprefix\t
+publisher\tprefix\ttest
 """
                 self.assertEqualDiff(expected, self.output)
 
-                self.pkgrepo("-s %s property -HF tsv publisher/prefix "
+                self.pkgrepo("get -s %s -HF tsv publisher/prefix "
                     "repository/origins" % repo_uri)
                 expected = """\
-publisher\tprefix\t
-repository\torigins\t[]
+publisher\tprefix\ttest
+repository\torigins\t()
 """
                 self.assertEqualDiff(expected, self.output)
 
-                # Verify set-property fails if no property is provided.
-                self.pkgrepo("-s %s set-property" % repo_uri, exit=2)
+                # Verify set fails if no property is provided.
+                self.pkgrepo("set -s %s" % repo_uri, exit=2)
 
-                # Verify set-property gracefully handles bad property values.
-                self.pkgrepo("-s %s set-property publisher/prefix=_invalid" %
-                    repo_uri, exit=1)
+                # Verify set gracefully handles bad property values.
+                self.pkgrepo("set -s %s publisher/prefix=_invalid" %repo_uri,
+                    exit=1)
 
-                # Verify set-property can set single value properties.
-                self.pkgrepo("-s %s set-property "
-                    "publisher/prefix=opensolaris.org" % repo_uri)
-                self.pkgrepo("-s %s property -HF tsv publisher/prefix" %
+                # Verify set can set single value properties.
+                self.pkgrepo("set -s %s publisher/prefix=opensolaris.org" %
                     repo_uri)
+                self.pkgrepo("get -s %s -HF tsv publisher/prefix" % repo_uri)
                 expected = """\
 publisher\tprefix\topensolaris.org
 """
                 self.assertEqualDiff(expected, self.output)
 
-                # Verify set-property can set multi-value properties.
-                self.pkgrepo("-s %s set-property "
+                # Verify set can set multi-value properties.
+                self.pkgrepo("set -s %s "
                     "'repository/origins=(http://pkg.opensolaris.org/dev "
                     "http://pkg-eu-2.opensolaris.org/dev)'" % repo_uri)
-                self.pkgrepo("-s %s property -HF tsv repository/origins" %
-                    repo_uri)
+                self.pkgrepo("get -s %s -HF tsv repository/origins" % repo_uri)
                 expected = """\
-repository\torigins\t['http://pkg.opensolaris.org/dev', 'http://pkg-eu-2.opensolaris.org/dev']
+repository\torigins\t(http://pkg.opensolaris.org/dev http://pkg-eu-2.opensolaris.org/dev)
 """
                 self.assertEqualDiff(expected, self.output)
 
-                # Verify set-property can set unknown properties.
-                self.pkgrepo("-s %s set-property 'foo/bar=value'" % repo_uri)
-                self.pkgrepo("-s %s property -HF tsv foo/bar" % repo_uri)
+                # Verify set can set unknown properties.
+                self.pkgrepo("set -s %s 'foo/bar=value'" % repo_uri)
+                self.pkgrepo("get -s %s -HF tsv foo/bar" % repo_uri)
                 expected = """\
 foo\tbar\tvalue
 """
                 self.assertEqualDiff(expected, self.output)
 
-        def test_03_publisher(self):
-                """Verify pkgrepo publisher works as expected."""
+                # Create a repository (a version 3 one is needed for this
+                # test).
+                repo_path = self.dc.get_repodir()
+                repo_uri = self.dc.get_repo_url()
+                depot_uri = self.dc.get_depot_url()
+                shutil.rmtree(repo_path)
+                self.assert_(not os.path.exists(repo_path))
+                self.pkgrepo("create -s %s --version=3" % repo_path)
+                self.pkgrepo("set -s %s publisher/prefix=test" % repo_path)
+
+                # Verify setting publisher properties fails for version 3
+                # repositories.
+                self.pkgrepo("set -s %s -p all "
+                    "repository/origins=http://localhost" % repo_uri, exit=1)
+
+                # Create version 4 repository.
+                shutil.rmtree(repo_path)
+                self.assert_(not os.path.exists(repo_path))
+                self.create_repo(repo_path)
+
+                # Verify get handles unknown publishers gracefully.
+                self.pkgrepo("get -s %s -p test repository/origins" % repo_uri,
+                    exit=1)
+
+                # Add a publisher by setting properties for one that doesn't
+                # exist yet.
+                self.pkgrepo("set -s %s -p test "
+                    "repository/name='package repository' "
+                    "repository/refresh-seconds=7200" %
+                    repo_uri)
+
+                # Verify get handles unknown properties gracefully.
+                self.pkgrepo("get -s %s -p test repository/unknown" % repo_uri,
+                    exit=1)
+
+                # Verify get returns partial failure if only some properties
+                # cannot be found.
+                self.pkgrepo("get -s %s -p all repository/origins "
+                    "repository/unknown" % repo_uri, exit=3)
 
-                # Verify command without a repository exits.
-                self.pkgrepo("publisher", exit=2)
+                # Verify full default output for both network and file case.
+                self.dc.start()
+                for uri in (repo_uri, depot_uri):
+                        self.pkgrepo("get -s %s -p all" % uri)
+                        expected = """\
+PUBLISHER SECTION    PROPERTY         VALUE
+test      publisher  alias            
+test      publisher  prefix           test
+test      repository collection-type  core
+test      repository description      
+test      repository legal-uris       ()
+test      repository mirrors          ()
+test      repository name             package\ repository
+test      repository origins          ()
+test      repository refresh-seconds  7200
+test      repository registration-uri ""
+test      repository related-uris     ()
+"""
+                        self.assertEqualDiff(expected, self.output)
+                self.dc.stop()
+
+                # Verify full tsv output.
+                self.pkgrepo("get -s %s -p all -Ftsv" % repo_uri)
+                expected = """\
+PUBLISHER\tSECTION\tPROPERTY\tVALUE
+test\tpublisher\talias\t
+test\tpublisher\tprefix\ttest
+test\trepository\tcollection-type\tcore
+test\trepository\tdescription\t
+test\trepository\tlegal-uris\t()
+test\trepository\tmirrors\t()
+test\trepository\tname\tpackage\ repository
+test\trepository\torigins\t()
+test\trepository\trefresh-seconds\t7200
+test\trepository\tregistration-uri\t""
+test\trepository\trelated-uris\t()
+"""
+                self.assertEqualDiff(expected, self.output)
+
+                # Verify that -H omits headers for full output.
+                self.pkgrepo("get -s %s -p all -H" % repo_uri)
+                self.assert_(self.output.find("SECTION") == -1)
+
+                # Verify specific get default output and that
+                # -H omits headers for specific get output.
+                self.pkgrepo("get -s %s -p all publisher/prefix" %
+                    repo_uri)
+                expected = """\
+PUBLISHER SECTION    PROPERTY         VALUE
+test      publisher  prefix           test
+"""
+                self.assertEqualDiff(expected, self.output)
+
+                self.pkgrepo("get -s %s -p all -H publisher/prefix "
+                    "repository/origins" % repo_uri)
+                expected = """\
+test      publisher  prefix           test
+test      repository origins          ()
+"""
+                self.assertEqualDiff(expected, self.output)
+
+                # Verify specific get tsv output.
+                self.pkgrepo("get -s %s -p all -F tsv publisher/prefix" %
+                    repo_uri)
+                expected = """\
+PUBLISHER\tSECTION\tPROPERTY\tVALUE
+test\tpublisher\tprefix\ttest
+"""
+                self.assertEqualDiff(expected, self.output)
+
+                self.pkgrepo("get -s %s -HF tsv -p all publisher/prefix "
+                    "repository/origins" % repo_uri)
+                expected = """\
+test\tpublisher\tprefix\ttest
+test\trepository\torigins\t()
+"""
+                self.assertEqualDiff(expected, self.output)
+
+                # Verify set fails if no property is provided.
+                self.pkgrepo("set -s %s -p test" % repo_uri, exit=2)
 
-                # Create a repository.
-                repo_path = os.path.join(self.test_root, "repo")
-                repo_uri = "file:%s" % repo_path
-                self.assert_(not os.path.exists(repo_path))
-                self.pkgrepo("-s %s create" % repo_path)
+                # Verify set gracefully handles bad property values and
+                # properties that can't be set.
+                self.pkgrepo("set -s %s -p test publisher/alias=_invalid" %
+                    repo_uri, exit=1)
+                self.pkgrepo("set -s %s -p test publisher/prefix=_invalid" %
+                    repo_uri, exit=2)
+
+                # Verify set can set single value properties.
+                self.pkgrepo("set -s %s -p all publisher/alias=test1" %
+                    repo_uri)
+                self.pkgrepo("get -s %s -p all -HF tsv publisher/alias "
+                    "publisher/prefix" % repo_uri)
+                expected = """\
+test\tpublisher\talias\ttest1
+test\tpublisher\tprefix\ttest
+"""
+                self.assertEqualDiff(expected, self.output)
+
+                # Verify set can set multi-value properties.
+                self.pkgrepo("set -s %s -p all "
+                    "'repository/origins=(http://pkg.opensolaris.org/dev "
+                    "http://pkg-eu-2.opensolaris.org/dev)'" % repo_uri)
+                self.pkgrepo("get -s %s -p all -HF tsv repository/origins" %
+                    repo_uri)
+                expected = """\
+test\trepository\torigins\t(http://pkg-eu-2.opensolaris.org/dev/ http://pkg.opensolaris.org/dev/)
+"""
+                self.assertEqualDiff(expected, self.output)
+
+                # Verify set can not set unknown properties.
+                self.pkgrepo("set -s %s -p all 'foo/bar=value'" % repo_uri,
+                    exit=2)
+
+                # Add another publisher by setting a property for it.
+                self.pkgrepo("set -p test2 -s %s publisher/alias=''" % repo_uri)
+
+                # Verify get returns properties for multiple publishers.
+                expected = """\
+test\tpublisher\talias\ttest1
+test\tpublisher\tprefix\ttest
+test\trepository\tcollection-type\tcore
+test\trepository\tdescription\t
+test\trepository\tlegal-uris\t()
+test\trepository\tmirrors\t()
+test\trepository\tname\tpackage\ repository
+test\trepository\torigins\t(http://pkg-eu-2.opensolaris.org/dev/ http://pkg.opensolaris.org/dev/)
+test\trepository\trefresh-seconds\t7200
+test\trepository\tregistration-uri\t""
+test\trepository\trelated-uris\t()
+test2\tpublisher\talias\t""
+test2\tpublisher\tprefix\ttest2
+test2\trepository\tcollection-type\tcore
+test2\trepository\tdescription\t""
+test2\trepository\tlegal-uris\t()
+test2\trepository\tmirrors\t()
+test2\trepository\tname\t""
+test2\trepository\torigins\t()
+test2\trepository\trefresh-seconds\t""
+test2\trepository\tregistration-uri\t""
+test2\trepository\trelated-uris\t()
+"""
+                self.pkgrepo("get -s %s -p all -HFtsv" % repo_uri)
+                self.assertEqualDiff(expected, self.output)
+
+                self.pkgrepo("get -s %s -p test -p test2 -HFtsv" % repo_uri)
+                self.assertEqualDiff(expected, self.output)
+
+                # Verify get can list multiple specific properties for
+                # multiple specific publishers correctly.
+                expected = """\
+test\tpublisher\talias\ttest1
+test2\tpublisher\talias\t""
+"""
+                self.pkgrepo("get -s %s -HFtsv -p test -p test2 "
+                    "publisher/alias" % repo_uri)
+                self.assertEqualDiff(expected, self.output)
+
+                # Verify get has correct output even when some publishers
+                # can't be found (and exits with partial failure).
+                expected = """\
+test\tpublisher\talias\ttest1
+test\tpublisher\tprefix\ttest
+test\trepository\tcollection-type\tcore
+test\trepository\tdescription\t
+test\trepository\tlegal-uris\t()
+test\trepository\tmirrors\t()
+test\trepository\tname\tpackage\ repository
+test\trepository\torigins\t(http://pkg-eu-2.opensolaris.org/dev/ http://pkg.opensolaris.org/dev/)
+test\trepository\trefresh-seconds\t7200
+test\trepository\tregistration-uri\t""
+test\trepository\trelated-uris\t()
+"""
+                self.pkgrepo("get -s %s -p test -p bogus -HFtsv" % repo_uri,
+                    exit=3)
+                self.assertEqualDiff(expected, self.output)
+
+                # Verify set can set multiple properties for all or specific
+                # publishers when multiple publishers are known.
+                self.pkgrepo("set -s %s -p all "
+                    "repository/description='Support Repository'" % repo_uri)
+                expected = """\
+test\trepository\tdescription\tSupport\\ Repository
+test2\trepository\tdescription\tSupport\\ Repository
+"""
+                self.pkgrepo("get -s %s -HFtsv -p all repository/description" %
+                    repo_uri)
+                self.assertEqualDiff(expected, self.output)
+
+                self.pkgrepo("set -s %s -p test2 "
+                    "repository/description='2nd Support Repository'" %
+                        repo_uri)
+                expected = """\
+test\trepository\tdescription\tSupport\\ Repository
+test2\trepository\tdescription\t2nd\\ Support\\ Repository
+"""
+                self.pkgrepo("get -s %s -HFtsv -p all repository/description" %
+                    repo_uri)
+                self.assertEqualDiff(expected, self.output)
+
+        def __test_info(self, repo_path, repo_uri):
+                """Private function to verify publisher subcommand behaviour."""
 
                 # Verify subcommand behaviour for empty repository and -H
                 # functionality.
-                self.pkgrepo("-s %s publisher" % repo_uri)
+                self.pkgrepo("info -s %s" % repo_uri)
                 expected = """\
-PUBLISHER                PACKAGES VERSIONS UPDATED
+PUBLISHER PACKAGES STATUS           UPDATED
 """
                 self.assertEqualDiff(expected, self.output)
 
-                self.pkgrepo("-s %s publisher -H" % repo_uri)
+                self.pkgrepo("info -s %s -H" % repo_uri)
                 expected = """\
 """
                 self.assertEqualDiff(expected, self.output)
 
                 # Set a default publisher.
-                self.pkgrepo("-s %s set-property publisher/prefix=test" %
-                    repo_uri)
+                self.pkgrepo("set -s %s publisher/prefix=test" % repo_path)
+
+                # If a depot is running, this will trigger a reload of the
+                # configuration data.
+                self.dc.refresh()
 
                 # Publish some packages.
                 self.pkgsend_bulk(repo_uri, (self.tree10, self.amber10,
                     self.amber20, self.truck10, self.truck20))
 
-                # Verify publisher handles unknown publishers gracefully.
-                self.pkgrepo("-s %s publisher unknown" % repo_uri, exit=1)
+                # Verify info handles unknown publishers gracefully.
+                self.pkgrepo("info -s %s -p unknown" % repo_uri, exit=1)
 
-                # Verify publisher returns partial failure if only some
-                # publishers cannot be found.
-                self.pkgrepo("-s %s publisher test unknown" % repo_uri, exit=3)
+                # Verify info returns partial failure if only some publishers
+                # cannot be found.
+                self.pkgrepo("info -s %s -p test -p unknown" % repo_uri, exit=3)
 
                 # Verify full default output.
                 repo = self.get_repo(repo_path)
-                self.pkgrepo("-s %s publisher -H" % repo_uri)
+                self.pkgrepo("info -s %s -H" % repo_uri)
+                cat = repo.get_catalog("test")
+                cat_lm = cat.last_modified.isoformat()
                 expected = """\
-test                     3        5        %sZ
-""" % repo.catalog.last_modified.isoformat()
+test      3        online           %sZ
+""" % cat_lm
                 self.assertEqualDiff(expected, self.output)
 
                 # Verify full tsv output.
-                self.pkgrepo("-s %s publisher -HF tsv" % repo_uri)
+                self.pkgrepo("info -s %s -HF tsv" % repo_uri)
                 expected = """\
-test\t3\t5\t%sZ
-""" % repo.catalog.last_modified.isoformat()
+test\t3\tonline\t%sZ
+""" % cat_lm
                 self.assertEqualDiff(expected, self.output)
 
-                # Verify specific publisher default output.
-                self.pkgrepo("-s %s publisher -H test" % repo_uri)
+                # Verify info specific publisher default output.
+                self.pkgrepo("info -s %s -H -p test" % repo_uri)
                 expected = """\
-test                     3        5        %sZ
-""" % repo.catalog.last_modified.isoformat()
+test      3        online           %sZ
+""" % cat_lm
                 self.assertEqualDiff(expected, self.output)
 
-                # Verify specific publisher tsv output.
-                self.pkgrepo("-s %s publisher -HF tsv test" % repo_uri)
+                # Verify info specific publisher tsv output.
+                self.pkgrepo("info -s %s -HF tsv -p test" % repo_uri)
                 expected = """\
-test\t3\t5\t%sZ
-""" % repo.catalog.last_modified.isoformat()
+test\t3\tonline\t%sZ
+""" % cat_lm
                 self.assertEqualDiff(expected, self.output)
 
-        def test_04_rebuild(self):
-                """Verify pkgrepo rebuild works as expected."""
+        def test_03_info(self):
+                """Verify pkgrepo info works as expected."""
+
+                # Verify command without a repository exits.
+                self.pkgrepo("info", exit=2)
 
-                # Verify create without a destination exits.
-                self.pkgrepo("rebuild", exit=2)
+                # Create a repository, verify file-based repository access,
+                # and then discard the repository.
+                repo_path = self.dc.get_repodir()
+                repo_uri = self.dc.get_repo_url()
+                shutil.rmtree(repo_path)
+                self.create_repo(repo_path)
+                self.__test_info(repo_path, repo_uri)
+                shutil.rmtree(repo_path)
 
-                # Create a repository.
-                repo_path = os.path.join(self.test_root, "repo")
-                repo_uri = "file:%s" % repo_path
+                # Create a repository and verify http-based repository access.
                 self.assert_(not os.path.exists(repo_path))
-                repo = self.create_repo(repo_path, properties={ "publisher": {
-                    "prefix": "test" } })
+                self.create_repo(repo_path)
+                self.dc.clear_property("publisher", "prefix")
+                self.dc.start()
+                repo_uri = self.dc.get_depot_url()
+                self.__test_info(repo_path, repo_uri)
+                self.dc.stop()
 
+        def __test_rebuild(self, repo_path, repo_uri):
+                """Private function to verify rebuild subcommand behaviour."""
+
+                #
                 # Verify rebuild works for an empty repository.
-                lm = repo.catalog.last_modified.isoformat()
-                self.pkgrepo("-s %s rebuild" % repo_path)
+                #
+                repo = self.get_repo(repo_path)
+                lm = repo.get_catalog("test").last_modified.isoformat()
+                self.pkgrepo("rebuild -s %s" % repo_uri)
+                self.wait_repo(repo_path)
                 repo = self.get_repo(repo_path)
-                self.assertNotEqual(lm, repo.catalog.last_modified.isoformat())
+                nlm = repo.get_catalog("test").last_modified.isoformat()
+                self.assertNotEqual(lm, nlm)
+
+                #
+                # Verify rebuild --no-index works for an empty repository.
+                #
+                lm = repo.get_catalog("test").last_modified.isoformat()
+                self.pkgrepo("rebuild -s %s --no-index" % repo_uri)
+                self.wait_repo(repo_path)
+                repo = self.get_repo(repo_path)
+                nlm = repo.get_catalog("test").last_modified.isoformat()
+                self.assertNotEqual(lm, nlm)
 
-                # Publish some packages.
+                #
+                # Verify rebuild --no-catalog works for an empty repository,
+                # and that the catalog itself does not change.
+                #
+                lm = repo.get_catalog("test").last_modified.isoformat()
+                self.pkgrepo("rebuild -s %s --no-catalog" % repo_uri)
+                self.wait_repo(repo_path)
+                repo = self.get_repo(repo_path)
+                nlm = repo.get_catalog("test").last_modified.isoformat()
+                self.assertEqual(lm, nlm)
+
+                #
+                # Publish some packages and verify they are known afterwards.
+                #
                 plist = self.pkgsend_bulk(repo_uri, (self.amber10, self.tree10))
-
-                # Check that the published packages are seen.
                 repo = self.get_repo(repo_path)
                 self.assertEqual(list(
-                    str(f) for f in repo.catalog.fmris(ordered=True)
+                    str(f) for f in repo.get_catalog("test").fmris(ordered=True)
                 ), plist)
 
                 #
+                # Verify that rebuild --no-catalog works for a repository with
+                # packages.
+                #
+
+                # Now rebuild and verify packages are still known and catalog
+                # remains unchanged.
+                lm = repo.get_catalog("test").last_modified.isoformat()
+                self.pkgrepo("rebuild -s %s --no-catalog" % repo_uri)
+                self.wait_repo(repo_path)
+                repo = self.get_repo(repo_path)
+                self.assertEqual(plist,
+                    list(str(f) for f in repo.get_catalog("test").fmris(
+                    ordered=True)))
+                nlm = repo.get_catalog("test").last_modified.isoformat()
+                self.assertEqual(lm, nlm)
+
+                # Destroy the catalog.
+                repo.get_catalog("test").destroy()
+
+                # Reload the repository object and verify no packages are known.
+                repo = self.get_repo(repo_path)
+                self.assertEqual(set(), repo.get_catalog("test").names())
+
+                # Now rebuild and verify packages are still unknown and catalog
+                # remains unchanged.
+                lm = repo.get_catalog("test").last_modified.isoformat()
+                self.pkgrepo("rebuild -s %s --no-catalog" % repo_uri)
+                self.wait_repo(repo_path)
+                repo = self.get_repo(repo_path)
+                self.assertEqual(set(), repo.get_catalog("test").names())
+                nlm = repo.get_catalog("test").last_modified.isoformat()
+                self.assertEqual(lm, nlm)
+
+                #
                 # Verify rebuild will find all the packages again and that they
                 # can be searched for.
                 #
 
-                # Destroy the catalog and index.
-                repo.catalog.destroy()
-                shutil.rmtree(repo.index_root)
+                # Destroy the catalog.
+                repo.get_catalog("test").destroy()
 
                 # Reload the repository object and verify no packages are known.
                 repo = self.get_repo(repo_path)
-                self.assertEqual(set(), repo.catalog.names())
+                self.assertEqual(set(), repo.get_catalog("test").names())
 
-                self.pkgrepo("-s %s rebuild" % repo_uri)
+                # Now rebuild and verify packages are known and can be searched
+                # for.
+                self.pkgrepo("rebuild -s %s" % repo_uri)
+                self.wait_repo(repo_path)
                 repo = self.get_repo(repo_path)
                 self.assertEqual(plist,
-                    list(str(f) for f in repo.catalog.fmris(ordered=True)))
+                    list(str(f) for f in repo.get_catalog("test").fmris(
+                    ordered=True)))
 
                 query = Query("tree", False, Query.RETURN_PACKAGES, None, None)
                 result = list(e for e in [r for r in repo.search([query])][0])
@@ -453,16 +766,18 @@
 
                 # Destroy the catalog only (to verify that rebuild destroys
                 # the index).
-                repo.catalog.destroy()
+                repo.get_catalog("test").destroy()
 
                 # Reload the repository object and verify no packages are known.
                 repo = self.get_repo(repo_path)
-                self.assertEqual(set(), repo.catalog.names())
+                self.assertEqual(set(), repo.get_catalog("test").names())
 
-                self.pkgrepo("-s %s rebuild --no-index" % repo_uri)
+                self.pkgrepo("rebuild -s %s --no-index" % repo_uri)
+                self.wait_repo(repo_path)
                 repo = self.get_repo(repo_path, read_only=True)
                 self.assertEqual(plist,
-                    list(str(f) for f in repo.catalog.fmris(ordered=True)))
+                    list(str(f) for f in repo.get_catalog("test").fmris(
+                    ordered=True)))
 
                 query = Query("tree", False, Query.RETURN_PACKAGES, None, None)
                 try:
@@ -472,52 +787,65 @@
                             ][0]
                         )
                 except Exception, e:
-                        self.assert_(isinstance(e, se.NoIndexException))
+                        self.debug("query exception: %s" % e)
+                        self.assert_(isinstance(e,
+                            sr.RepositorySearchUnavailableError))
                 else:
-                        raise RuntimeError("Expected NoIndexException")
+                        raise RuntimeError("Expected "
+                            "RepositorySearchUnavailableError")
 
-        def test_05_refresh(self):
-                """Verify pkgrepo refresh works as expected."""
+        def test_04_rebuild(self):
+                """Verify pkgrepo rebuild works as expected."""
+
+                # Verify rebuild without a target exits.
+                self.pkgrepo("rebuild", exit=2)
 
-                # Verify create without a destination exits.
-                self.pkgrepo("refresh", exit=2)
+                # Create a repository, verify file-based repository access,
+                # and then discard the repository.
+                repo_path = self.dc.get_repodir()
+                repo_uri = self.dc.get_repo_url()
+                self.__test_rebuild(repo_path, repo_uri)
+                shutil.rmtree(repo_path)
 
-                # Create a repository.
-                repo_path = os.path.join(self.test_root, "repo")
-                repo_uri = "file:%s" % repo_path
+                # Create a repository and verify network-based repository
+                # access.
                 self.assert_(not os.path.exists(repo_path))
-                repo = self.create_repo(repo_path, properties={ "publisher": {
+                self.create_repo(repo_path, properties={ "publisher": {
                     "prefix": "test" } })
+                self.dc.clear_property("publisher", "prefix")
+                self.dc.start()
+                repo_uri = self.dc.get_depot_url()
+                self.__test_rebuild(repo_path, repo_uri)
+                self.dc.stop()
+
+        def __test_refresh(self, repo_path, repo_uri):
+                """Private function to verify refresh subcommand behaviour."""
 
                 # Verify refresh doesn't fail for an empty repository.
-                self.pkgrepo("-s %s refresh" % repo_path)
+                self.pkgrepo("refresh -s %s" % repo_path)
+                self.wait_repo(repo_path)
 
                 # Publish some packages.
                 plist = self.pkgsend_bulk(repo_uri, (self.amber10, self.tree10))
 
-                # Check that the published packages are seen.
-                repo = self.get_repo(repo_path)
-                self.assertEqual(list(
-                    str(f) for f in repo.catalog.fmris(ordered=True)
-                ), plist)
-
                 #
                 # Verify refresh will find new packages and that they can be
                 # searched for.
                 #
 
-                # Destroy the index.
-                shutil.rmtree(repo.index_root)
-
-                # Reload the repository object.
+                # Reload the repository object and verify published packages
+                # are known.
                 repo = self.get_repo(repo_path, read_only=True)
                 self.assertEqual(plist,
-                    list(str(f) for f in repo.catalog.fmris(ordered=True)))
+                    list(str(f) for f in repo.get_catalog("test").fmris(
+                    ordered=True)))
 
-                self.pkgrepo("-s %s refresh" % repo_uri)
+                self.pkgrepo("refresh -s %s" % repo_uri)
+                self.wait_repo(repo_path)
                 repo = self.get_repo(repo_path, read_only=True)
                 self.assertEqual(plist,
-                    list(str(f) for f in repo.catalog.fmris(ordered=True)))
+                    list(str(f) for f in repo.get_catalog("test").fmris(
+                    ordered=True)))
 
                 query = Query("tree", False, Query.RETURN_PACKAGES, None, None)
                 result = list(e for e in [r for r in repo.search([query])][0])
@@ -541,14 +869,15 @@
                 # Now publish a new package and refresh again with --no-index,
                 # and verify that search data doesn't include the new package.
                 #
-                plist.extend(self.pkgsend_bulk(repo_uri, self.truck10,
-                    no_index=True))
+                plist.extend(self.pkgsend_bulk(repo_uri, self.truck10))
                 fmris.append(fmri.PkgFmri(plist[-1]).get_fmri(anarchy=True))
 
-                self.pkgrepo("-s %s refresh --no-index" % repo_uri)
+                self.pkgrepo("refresh -s %s --no-index" % repo_uri)
+                self.wait_repo(repo_path)
                 repo = self.get_repo(repo_path, read_only=True)
                 self.assertEqualDiff(plist,
-                    list(str(f) for f in repo.catalog.fmris(ordered=True)))
+                    list(str(f) for f in repo.get_catalog("test").fmris(
+                    ordered=True)))
 
                 query = Query("truck", False, Query.RETURN_PACKAGES, None, None)
                 result = list(e for e in [r for r in repo.search([query])][0])
@@ -561,13 +890,14 @@
                 # search.
                 #
                 plist.extend(self.pkgsend_bulk(repo_uri, self.zoo10,
-                    no_catalog=True, no_index=True))
+                    no_catalog=True))
                 fmris.append(fmri.PkgFmri(plist[-1]).get_fmri(anarchy=True))
 
-                self.pkgrepo("-s %s refresh --no-catalog" % repo_uri)
+                self.pkgrepo("refresh -s %s --no-catalog" % repo_uri)
+                self.wait_repo(repo_path)
                 repo = self.get_repo(repo_path, read_only=True)
                 self.assertEqual(plist[:-1], list(
-                    str(f) for f in repo.catalog.fmris(ordered=True)
+                    str(f) for f in repo.get_catalog("test").fmris(ordered=True)
                 ))
 
                 query = Query("truck", False, Query.RETURN_PACKAGES, None, None)
@@ -582,12 +912,37 @@
 
                 # Finally, run refresh once more and verify that all packages
                 # are now visible in the catalog.
-                self.pkgrepo("-s %s refresh" % repo_uri)
+                self.pkgrepo("refresh -s %s" % repo_uri)
+                self.wait_repo(repo_path)
                 repo = self.get_repo(repo_path, read_only=True)
                 self.assertEqual(plist, list(
-                    str(f) for f in repo.catalog.fmris(ordered=True)
+                    str(f) for f in repo.get_catalog("test").fmris(ordered=True)
                 ))
 
+        def test_05_refresh(self):
+                """Verify pkgrepo refresh works as expected."""
+
+                # Verify refresh without a target exits.
+                self.pkgrepo("refresh", exit=2)
+
+                # Create a repository, verify file-based repository access,
+                # and then discard the repository.
+                repo_path = self.dc.get_repodir()
+                repo_uri = self.dc.get_repo_url()
+                self.__test_refresh(repo_path, repo_uri)
+                shutil.rmtree(repo_path)
+
+                # Create a repository and verify network-based repository
+                # access.
+                self.assert_(not os.path.exists(repo_path))
+                self.create_repo(repo_path, properties={ "publisher": {
+                    "prefix": "test" } })
+                self.dc.clear_property("publisher", "prefix")
+                self.dc.start()
+                repo_uri = self.dc.get_depot_url()
+                self.__test_refresh(repo_path, repo_uri)
+                self.dc.stop()
+
         def test_06_version(self):
                 """Verify pkgrepo version works as expected."""
 
@@ -596,57 +951,119 @@
 
                 # Verify version exits with error if a repository location is
                 # provided.
-                self.pkgrepo("-s %s version" % self.test_root, exit=2)
+                self.pkgrepo("version -s %s" % self.test_root, exit=2)
 
                 # Verify version output is sane.
                 self.pkgrepo("version")
                 self.assert_(self.output.find(pkg.VERSION) != -1)
 
+        def __test_add_remove_certs(self, repo_uri, pubs=[]):
+                """Private helper method to test certificate add and remove
+                for default publisher case."""
+
+                pub_opt = "".join(" -p %s " % p for p in pubs)
+                exit = 0
+                if "nosuchpub" in pubs:
+                        if len(pubs) > 1:
+                                # Expect partial in this case.
+                                exit = 3
+                        else:
+                                # Expect failure in this case.
+                                exit = 1
+
+                ca3_pth = os.path.join(self.pub_cas_dir, "pubCA1_ta3_cert.pem")
+                ca1_pth = os.path.join(self.pub_cas_dir, "pubCA1_ta1_cert.pem")
+
+                ca1_hsh = self.calc_file_hash(ca1_pth)
+                ca3_hsh = self.calc_file_hash(ca3_pth)
+
+                self.pkgrepo("add-signing-ca-cert -s %s %s%s" %
+                    (repo_uri, pub_opt, ca3_pth), exit=exit)
+                self.pkgrepo("add-signing-ca-cert -s %s %s%s" %
+                    (repo_uri, pub_opt, ca1_pth), exit=exit)
+
+                self.pkgrepo("remove-signing-intermediate-cert -s %s %s%s" %
+                    (repo_uri, pub_opt, ca1_hsh), exit=exit)
+                self.pkgrepo("remove-signing-intermediate-cert -s %s %s%s" %
+                    (repo_uri, pub_opt, ca3_hsh), exit=exit)
+                self.pkgrepo("remove-signing-ca-cert -s %s %s%s" %
+                    (repo_uri, pub_opt, ca1_hsh), exit=exit)
+                self.pkgrepo("remove-signing-ca-cert -s %s %s%s" %
+                    (repo_uri, pub_opt, ca3_hsh), exit=exit)
+
+                self.pkgrepo("add-signing-intermediate-cert -s %s %s%s" %
+                    (repo_uri, pub_opt, ca3_pth), exit=exit)
+                self.pkgrepo("add-signing-intermediate-cert -s %s %s%s" %
+                    (repo_uri, pub_opt, ca1_pth), exit=exit)
+
+                self.pkgrepo("remove-signing-ca-cert -s %s %s%s" %
+                    (repo_uri, pub_opt, ca1_hsh), exit=exit)
+                self.pkgrepo("remove-signing-ca-cert -s %s %s%s" %
+                    (repo_uri, pub_opt, ca3_hsh), exit=exit)
+                self.pkgrepo("remove-signing-intermediate-cert -s %s %s%s" %
+                    (repo_uri, pub_opt, ca1_hsh), exit=exit)
+                self.pkgrepo("remove-signing-intermediate-cert -s %s %s%s" %
+                    (repo_uri, pub_opt, ca3_hsh), exit=exit)
+
         def test_07_certs(self):
                 """Verify that certificate commands work as expected."""
 
                 # Create a repository.
                 repo_path = os.path.join(self.test_root, "repo")
                 repo_uri = "file:%s" % repo_path
-                self.assert_(not os.path.exists(repo_path))
-                self.pkgrepo("-s %s create" % repo_path)
+                self.create_repo(repo_path)
 
                 ca3_pth = os.path.join(self.pub_cas_dir, "pubCA1_ta3_cert.pem")
                 ca1_pth = os.path.join(self.pub_cas_dir, "pubCA1_ta1_cert.pem")
-                
-                self.pkgrepo("-s %s add-signing-ca-cert %s" %
-                    (repo_uri, ca3_pth))
-                self.pkgrepo("-s %s add-signing-ca-cert %s" %
-                    (repo_uri, ca1_pth))
 
                 ca1_hsh = self.calc_file_hash(ca1_pth)
                 ca3_hsh = self.calc_file_hash(ca3_pth)
 
-                self.pkgrepo("-s %s remove-signing-intermediate-cert %s" %
-                    (repo_uri, ca1_hsh))
-                self.pkgrepo("-s %s remove-signing-intermediate-cert %s" %
-                    (repo_uri, ca3_hsh))
-                self.pkgrepo("-s %s remove-signing-ca-cert %s" %
-                    (repo_uri, ca1_hsh))
-                self.pkgrepo("-s %s remove-signing-ca-cert %s" %
-                    (repo_uri, ca3_hsh))
-
-                self.pkgrepo("-s %s add-signing-intermediate-cert %s" %
-                    (repo_uri, ca3_pth))
+                # Verify that signing commands will fail gracefully if no
+                # default publisher has been set and a publisher was not
+                # specified.
+                self.pkgrepo("-s %s add-signing-ca-cert %s" % (repo_uri,
+                    ca3_pth), exit=1)
                 self.pkgrepo("-s %s add-signing-intermediate-cert %s" %
-                    (repo_uri, ca1_pth))
-
-                ca1_hsh = self.calc_file_hash(ca1_pth)
-                ca3_hsh = self.calc_file_hash(ca3_pth)
-
+                    (repo_uri, ca3_pth), exit=1)
+                self.pkgrepo("-s %s remove-signing-intermediate-cert %s" %
+                    (repo_uri, ca3_hsh), exit=1)
                 self.pkgrepo("-s %s remove-signing-ca-cert %s" %
-                    (repo_uri, ca1_hsh))
-                self.pkgrepo("-s %s remove-signing-ca-cert %s" %
-                    (repo_uri, ca3_hsh))
-                self.pkgrepo("-s %s remove-signing-intermediate-cert %s" %
-                    (repo_uri, ca1_hsh))
-                self.pkgrepo("-s %s remove-signing-intermediate-cert %s" %
-                    (repo_uri, ca3_hsh))
+                    (repo_uri, ca1_hsh), exit=1)
+
+                # Now verify that add / remove work as expected for a
+                # repository.
+
+                # Test default publisher case.
+                self.pkgrepo("set -s %s publisher/prefix=test" % repo_path)
+                self.__test_add_remove_certs(repo_uri)
+
+                # Test specific publisher case.
+                shutil.rmtree(repo_path)
+                self.create_repo(repo_path)
+                self.pkgrepo("set -s %s publisher/prefix=test" % repo_path)
+                self.__test_add_remove_certs(repo_uri, ["test"])
+                self.__test_add_remove_certs(repo_uri, ["nosuchpub"])
+
+                # Test multiple publisher case.
+                shutil.rmtree(repo_path)
+                self.create_repo(repo_path)
+                self.pkgrepo("set -s %s -p test publisher/alias=test" %
+                    repo_path)
+                self.pkgrepo("set -s %s -p test2 publisher/alias=test2" %
+                    repo_path)
+                self.__test_add_remove_certs(repo_uri, ["test", "test2"])
+                self.__test_add_remove_certs(repo_uri, ["test", "test2",
+                    "nosuchpub"])
+
+                # Last, verify that add/remove works for a v3 repository.
+                shutil.rmtree(repo_path)
+                self.create_repo(repo_path, version=3)
+                self.pkgrepo("set -s %s publisher/prefix=test" % repo_path)
+                self.__test_add_remove_certs(repo_uri, ["test"])
+                self.__test_add_remove_certs(repo_uri)
+                self.__test_add_remove_certs(repo_uri, ["nosuchpub"])
+
 
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/cli/t_pkgsend.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkgsend.py	Thu Aug 19 23:33:49 2010 -0700
@@ -28,7 +28,6 @@
 import pkg5unittest
 
 import os
-import os.path
 import pkg.fmri as fmri
 import pkg.manifest as manifest
 import shutil
@@ -326,6 +325,14 @@
                         self.pkgsend("file:%s%s" % (slashes, rpath), "create-repository"
                             " --set-property publisher.prefix=test")
 
+                        # Assert that create-repository creates a version 3
+                        # repository for compatibility with older consumers.
+                        for expected in ("catalog", "file", "index", "pkg",
+                            "trans", "tmp", "cfg_cache"):
+                                # A v3 repository must have all of the above.
+                                assert os.path.exists(os.path.join(rpath,
+                                    expected))
+
                         # Now verify that the repository was created by starting the
                         # depot server in readonly mode using the target repository.
                         # If it wasn't, restart_depots should fail with an exception
@@ -706,9 +713,7 @@
                             self.img_path, use_file=True), st.st_gid)
     
         def test_13_pkgsend_indexcontrol(self):
-                """Verify that "pkgsend close --no-index" suppresses
-                indexing and that "pkgsend refresh-index" triggers
-                indexing."""
+                """Verify that "pkgsend refresh-index" triggers indexing."""
 
                 dhurl = self.dc.get_depot_url()
                 dfurl = "file://%s" % urllib.pathname2url(self.dc.get_repodir())
@@ -723,7 +728,11 @@
                 self.pkgsend(dfurl, "open [email protected]")
                 self.pkgsend(dfurl, "add file %s %s path=/tmp/f.foo" \
                     % ( fpath, "mode=0755 owner=root group=bin" ))
+
+                # Verify that --no-index (even though it is now ignored) can be
+                # specified and doesn't cause pkgsend failure.
                 self.pkgsend(dfurl, "close --no-index")
+                self.wait_repo(self.dc.get_repodir())
 
                 self.dc.start()
                 self.pkg("search file:::", exit=1)
@@ -739,10 +748,10 @@
 
                 self.pkgsend(dhurl, "open [email protected]")
                 self.pkgsend(dhurl, "add file %s %s path=/tmp/f.foo" \
-                    % ( fpath, "mode=0755 owner=root group=bin" ))
-                self.pkgsend(dhurl, "close --no-index")
+                    % (fpath, "mode=0755 owner=root group=bin" ))
+                self.pkgsend(dhurl, "close")
 
-                self.dc.wait_search()
+                self.wait_repo(self.dc.get_repodir())
                 self.pkg("search http:::", exit=1)
 
                 self.pkgsend(dhurl, "refresh-index")
@@ -830,9 +839,13 @@
                                 # within the same second, so force the version
                                 # to be incremented.
                                 p2 = p.replace("<ver>", str(ver))
-                                self.pkgsend_bulk(url, p2, exit=exit)
-                                #if exit:
-                                #        self.pkgsend(url, "close -A")
+                                try:
+                                        self.pkgsend_bulk(url, p2, exit=exit)
+                                except:
+                                        self.debug("Expected exit code %s "
+                                            "while publishing %s" % (exit,
+                                            p2))
+                                        raise
 
                                 # Then do it line-by-line
                                 for i, l in enumerate(p.splitlines()):
@@ -862,21 +875,32 @@
                 f.close()
                 self.pkgsend("file://%s" % rpath,
                     "create-repository --set-property publisher.prefix=test")
-                cat_path = os.path.join(rpath, "catalog/catalog.attrs")
+
+                repo = self.dc.get_repo()
+                cat_path = repo.catalog_1("catalog.attrs")
                 mtime = os.stat(cat_path).st_mtime
-                self.pkgsend("file://%s publish --fmri-in-manifest --no-catalog %s" % (
-                                rpath, fpath))
+                self.pkgsend("file://%s" % rpath, "publish --fmri-in-manifest "
+                    "--no-catalog %s" % fpath)
                 new_mtime = os.stat(cat_path).st_mtime
-                # check that modified times are the same before and after publication
+                # Check that modified times are the same before and after
+                # publication.
                 self.assertEqual(mtime, new_mtime)
+
+                self.pkgsend("file://%s" % rpath, "open [email protected]")
+                self.pkgsend("file://%s" % rpath, "close --no-catalog")
+                new_mtime = os.stat(cat_path).st_mtime
+                # Check that modified times are the same before and after
+                # publication
+                self.assertEqual(mtime, new_mtime)
+
+                # Now start depot and verify both packages are visible when
+                # set to add content on startup.
                 self.dc.set_add_content()
-
                 self.dc.start()
-
                 dhurl = self.dc.get_depot_url()
                 self.dc.set_repodir(rpath)
                 self.image_create(dhurl)
-                self.pkg("list -a foo")
+                self.pkg("list -a bar foo")
                 self.image_destroy()
 
         def test_16_multiple_manifests(self):
--- a/src/tests/cli/t_pkgsign.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_pkgsign.py	Thu Aug 19 23:33:49 2010 -0700
@@ -1623,6 +1623,7 @@
                 # expiration and start dates.
                 self.assertRaises(apx.TransportError, self._api_install,
                     api_obj, ["var_pkg"], refresh_catalogs=False)
+
                 # Test that a TransportError from certificate retrieval is
                 # handled correctly.
                 self.pkg("install --no-refresh var_pkg", exit=1)
--- a/src/tests/cli/t_publish_api.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_publish_api.py	Thu Aug 19 23:33:49 2010 -0700
@@ -66,7 +66,7 @@
                         t = trans.Transaction(durl, pkg_name=str(pf),
                             xport=xport, pub=pub)
                         t.open()
-                        pkg_fmri, pkg_state = t.close(refresh_index=True)
+                        pkg_fmri, pkg_state = t.close()
                         self.debug("%s: %s" % (pkg_fmri, pkg_state))
 
         def test_stress_file_publish(self):
@@ -92,7 +92,7 @@
                         t = trans.Transaction(location, pkg_name=str(pf),
                             xport=xport, pub=pub)
                         t.open()
-                        pkg_fmri, pkg_state = t.close(refresh_index=True)
+                        pkg_fmri, pkg_state = t.close()
                         self.debug("%s: %s" % (pkg_fmri, pkg_state))
 
 
--- a/src/tests/cli/t_util_merge.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/cli/t_util_merge.py	Thu Aug 19 23:33:49 2010 -0700
@@ -142,8 +142,7 @@
                 path = urllib.url2pathname(parts[2])
 
                 try:
-                        return repo.Repository(auto_create=False,
-                            fork_allowed=False, repo_root=path)
+                        return repo.Repository(root=path)
                 except cfg.ConfigError, e:
                         raise repo.RepositoryError(_("The specified "
                             "repository's configuration data is not "
@@ -180,9 +179,8 @@
                 def get_expected(f):
                         exp_lines = ["set name=pkg.fmri value=%s" % f]
                         for dc in self.dcs.values():
-                                mpath = os.path.join(dc.get_repodir(),
-                                    "pkg", f.get_dir_path())
-
+                                repo = dc.get_repo()
+                                mpath = repo.manifest(f)
                                 if not os.path.exists(mpath):
                                         # Not in this repository, check next.
                                         continue
--- a/src/tests/pkg5unittest.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/tests/pkg5unittest.py	Thu Aug 19 23:33:49 2010 -0700
@@ -28,6 +28,7 @@
 import errno
 import gettext
 import hashlib
+import logging
 import os
 import pprint
 import shutil
@@ -96,7 +97,7 @@
 
 # Version test suite is known to work with.
 PKG_CLIENT_NAME = "pkg"
-CLIENT_API_VERSION = 40
+CLIENT_API_VERSION = 42
 
 ELIDABLE_ERRORS = [ TestSkippedException, depotcontroller.DepotStateException ]
 
@@ -165,6 +166,27 @@
                 return str
 
 
+class DebugLogHandler(logging.Handler):
+        """This class is a special log handler to redirect logger output to
+        the test case class' debug() method.
+        """
+
+        def __init__(self, test_case):
+                self.test_case = test_case
+                logging.Handler.__init__(self)
+
+        def emit(self, record):
+                self.test_case.debug(record)
+
+def setup_logging(test_case):
+        # Ensure logger messages output by unit tests are redirected
+        # to debug output so they are not shown by default.
+        from pkg.client import global_settings
+        log_handler = DebugLogHandler(test_case)
+        global_settings.info_log_handler = log_handler
+        global_settings.error_log_handler = log_handler
+
+
 class Pkg5TestCase(unittest.TestCase):
 
         # Needed for compatability
@@ -179,6 +201,7 @@
                 self.__pid = os.getpid()
                 self.__pwd = os.getcwd()
                 self.__didteardown = False
+                setup_logging(self)
 
         def __str__(self):
                 return "%s.py %s.%s" % (self.__class__.__module__,
@@ -320,6 +343,7 @@
                 #
                 os.environ["TMPDIR"] = self.__test_root
                 tempfile.tempdir = self.__test_root
+                setup_logging(self)
 
         def impl_tearDown(self):
                 # impl_tearDown exists so that we can ensure that this class's
@@ -466,7 +490,7 @@
                         cmd.extend(opts)
                         cmd.append(c_path)
                         try:
-                                # Make sure to use shell=true so that env.
+                                # Make sure to use shell=True so that env.
                                 # vars and $PATH are evaluated.
                                 self.debugcmd(" ".join(cmd))
                                 s = subprocess.Popen(" ".join(cmd),
@@ -1318,9 +1342,9 @@
                     (prefix, repourl, additional_args, self.img_path)
                 self.debugcmd(cmdline)
 
-                p = subprocess.Popen(cmdline, shell = True,
-                    stdout = subprocess.PIPE,
-                    stderr = subprocess.STDOUT)
+                p = subprocess.Popen(cmdline, shell=True,
+                    stdout=subprocess.PIPE,
+                    stderr=subprocess.STDOUT)
                 output = p.stdout.read()
                 retcode = p.wait()
                 self.debugresult(retcode, 0, output)
@@ -1390,8 +1414,7 @@
                     " ".join(args))
                 return self.cmdline_run(cmdline, comment=comment, exit=exit)
 
-        def pkgsend(self, depot_url="", command="", exit=0, comment="",
-            retry400=True):
+        def pkgsend(self, depot_url="", command="", exit=0, comment=""):
                 args = []
                 if depot_url:
                         args.append("-s " + depot_url)
@@ -1429,32 +1452,17 @@
                 elif (cmdop == "generate" and retcode == 0):
                         published = out
 
-                # This may be a failure due to us being too quick to republish,
-                # and getting the same timestamp twice.  Keep trying for a
-                # little more than 1 second.
-                #
-                # This is a nasty hack which is here as a placeholder until
-                # pkgsend can give us better error granularity.
-                #
-                if cmdop in ["publish", "open"] and retry400 and \
-                    ("status '400'" in errout or
-                    "'open' failed for transaction ID" in errout) and \
-                    "already exists" in errout:
-                        time.sleep(1)
-                        return self.pkgsend(depot_url, command, exit,
-                            comment, retry400=False)
-
                 if retcode == 99:
                         raise TracebackException(cmdline, out, comment)
 
                 if retcode != exit:
                         raise UnexpectedExitCodeException(cmdline, exit,
-                            retcode, out, comment)
+                            retcode, out + errout, comment)
 
                 return retcode, published
 
         def pkgsend_bulk(self, depot_url, commands, exit=0, comment="",
-            no_catalog=False, no_index=False):
+            no_catalog=False, refresh_index=False):
                 """ Send a series of packaging commands; useful  for quickly
                     doing a bulk-load of stuff into the repo.  All commands are
                     expected to work; if not, the transaction is abandoned.  If
@@ -1471,8 +1479,6 @@
                 extra_opts = []
                 if no_catalog:
                         extra_opts.append("--no-catalog")
-                if no_index:
-                        extra_opts.append("--no-index")
                 extra_opts = " ".join(extra_opts)
 
                 plist = []
@@ -1520,13 +1526,17 @@
                                 if line.startswith("open"):
                                         current_fmri = line[5:].strip()
 
+                        if exit == 0 and refresh_index:
+                                self.pkgrepo("-s %s refresh --no-catalog" %
+                                    depot_url)
                 except UnexpectedExitCodeException, e:
                         if e.exitcode != exit:
                                 raise
                         retcode = e.exitcode
 
                 if retcode != exit:
-                        raise UnexpectedExitCodeException(line, exit, retcode)
+                        raise UnexpectedExitCodeException(line, exit, retcode,
+                            self.output + self.errout)
 
                 return plist
 
@@ -1536,17 +1546,18 @@
                 cmd = "%s %s" % (prog, " ".join(args))
                 self.cmdline_run(cmd, exit=exit)
 
-        def copy_repository(self, src, src_pub, dest, dest_pub):
+        def copy_repository(self, src, dest, pub_map):
                 """Copies the packages from the src repository to a new
                 destination repository that will be created at dest.  In
                 addition, any packages from the src_pub will be assigned
                 to the dest_pub during the copy.  The new repository will
                 not have a catalog or search indices, so a depot server
                 pointed at the new repository must be started with the
-                --rebuild option."""
+                --rebuild option.
+                """
 
                 # Preserve destination repository's cfg_cache if it exists.
-                dest_cfg = os.path.join(dest, "cfg_cache")
+                dest_cfg = os.path.join(dest, "pkg5.repository")
                 dest_cfg_data = None
                 if os.path.exists(dest_cfg):
                         with open(dest_cfg, "rb") as f:
@@ -1559,38 +1570,64 @@
                     with open(dest_cfg, "wb") as f:
                             f.write(dest_cfg_data)
 
-                for entry in os.listdir(src):
-                        spath = os.path.join(src, entry)
-
-                        # Skip the catalog, index, and pkg directories
-                        # as they will be copied manually.  Also skip
-                        # any unknown files in the repository directory.
-                        if entry in ("catalog", "index", "pkg") or \
-                            not os.path.isdir(spath):
-                                continue
-                        shutil.copytree(spath, os.path.join(dest, entry))
+                def copy_manifests(src_root, dest_root):
+                        # Now copy each manifest and replace any references to
+                        # the old publisher with that of the new publisher as
+                        # they are copied.
+                        src_pkg_root = os.path.join(src_root, "pkg")
+                        dest_pkg_root = os.path.join(dest_root, "pkg")
+                        for stem in os.listdir(src_pkg_root):
+                                src_pkg_path = os.path.join(src_pkg_root, stem)
+                                dest_pkg_path = os.path.join(dest_pkg_root,
+                                    stem)
+                                for mname in os.listdir(src_pkg_path):
+                                        # Ensure destination manifest directory
+                                        # exists.
+                                        if not os.path.isdir(dest_pkg_path):
+                                                os.makedirs(dest_pkg_path,
+                                                    mode=0755)
 
-                # Now copy each manifest and replace any references to the old
-                # publisher with that of the new publisher as they are copied.
-                pkg_root = os.path.join(src, "pkg")
-                for stem in os.listdir(pkg_root):
-                        pkg_path = os.path.join(pkg_root, stem)
-                        for mname in os.listdir(pkg_path):
-                                # Ensure destination manifest directory exists.
-                                dmdpath = os.path.join(dest, "pkg", stem)
-                                if not os.path.isdir(dmdpath):
-                                        os.makedirs(dmdpath, mode=0755)
+                                        msrc = open(os.path.join(src_pkg_path,
+                                            mname), "rb")
+                                        mdest = open(os.path.join(dest_pkg_path,
+                                            mname), "wb")
+                                        for l in msrc:
+                                                if l.find("pkg://") == -1:
+                                                        mdest.write(l)
+                                                        continue
+                                                nl = l
+                                                for src_pub in pub_map:
+                                                        nl = nl.replace(
+                                                            src_pub,
+                                                            pub_map[src_pub])
+                                                mdest.write(nl)
+                                        msrc.close()
+                                        mdest.close()
 
-                                msrc = open(os.path.join(pkg_path, mname), "rb")
-                                mdest = open(os.path.join(dmdpath, mname), "wb")
-                                for l in msrc:
-                                        if l.find("pkg://") > -1:
-                                                mdest.write(l.replace(src_pub,
-                                                    dest_pub))
-                                        else:
-                                                mdest.write(l)
-                                msrc.close()
-                                mdest.close()
+                src_pub_root = os.path.join(src, "publisher")
+                if os.path.exists(src_pub_root):
+                        dest_pub_root = os.path.join(dest, "publisher")
+                        for pub in os.listdir(src_pub_root):
+                                if pub not in pub_map:
+                                        continue
+                                src_root = os.path.join(src_pub_root, pub)
+                                dest_root = os.path.join(dest_pub_root,
+                                    pub_map[pub])
+                                for entry in os.listdir(src_root):
+                                        # Skip the catalog, index, and pkg
+                                        # directories as they will be copied
+                                        # manually.
+                                        if entry not in ("catalog", "index",
+                                            "pkg", "tmp", "trans"):
+                                                spath = os.path.join(src_root,
+                                                    entry)
+                                                dpath = os.path.join(dest_root,
+                                                    entry)
+                                                shutil.copytree(spath, dpath)
+                                                continue
+                                        if entry != "pkg":
+                                                continue
+                                        copy_manifests(src_root, dest_root)
 
         def get_img_manifest_path(self, pfmri, img_path=None):
                 """Returns the path to the manifest for the fiven fmri."""
@@ -1670,7 +1707,7 @@
                 return self.cmdline_run(cmdline, comment=comment,
                     coverage=False, exit=exit)
 
-        def create_repo(self, repodir, properties=EmptyI):
+        def create_repo(self, repodir, properties=EmptyDict, version=None):
                 """ Convenience routine to help subclasses create a package
                     repository.  Returns a pkg.server.repository.Repository
                     object. """
@@ -1678,9 +1715,14 @@
                 # Note that this must be deferred until after PYTHONPATH
                 # is set up.
                 import pkg.server.repository as sr
-                repo = sr.Repository(auto_create=True, properties=properties,
-                    repo_root=repodir)
-                self.debug("created repository %s" % repodir)
+                try:
+                        repo = sr.repository_create(repodir,
+                            properties=properties, version=version)
+                        self.debug("created repository %s" % repodir)
+                except sr.RepositoryExistsError:
+                        # Already exists.
+                        repo = sr.Repository(root=repodir,
+                            properties=properties)
                 return repo
 
         def get_repo(self, repodir, read_only=False):
@@ -1691,8 +1733,7 @@
                 # Note that this must be deferred until after PYTHONPATH
                 # is set up.
                 import pkg.server.repository as sr
-                return sr.Repository(auto_create=False, read_only=read_only,
-                    repo_root=repodir)
+                return sr.Repository(read_only=read_only, root=repodir)
 
         def prep_depot(self, port, repodir, logpath, refresh_index=False,
             debug_features=EmptyI, properties=EmptyI, start=False):
@@ -1736,6 +1777,38 @@
                         self.create_repo(repodir, properties=properties)
                 return dc
 
+        def wait_repo(self, repodir, timeout=5.0):
+                """Wait for the specified repository to complete its current
+                operations before continuing."""
+
+                check_interval = 0.20
+                time.sleep(check_interval)
+
+                begintime = time.time()
+                ready = False
+                while (time.time() - begintime) <= timeout:
+                        status = self.get_repo(repodir).get_status()
+                        rdata = status.get("repository", {})
+                        repo_status = rdata.get("status", "")
+                        if repo_status == "online":
+                                for pubdata in rdata.get("publishers",
+                                    {}).values():
+                                        if pubdata.get("status", "") != "online":
+                                                ready = False
+                                                break
+                                else:
+                                        # All repository stores were ready.
+                                        ready = True
+
+                        if not ready:
+                                time.sleep(check_interval)
+                        else:
+                                break
+
+                if not ready:
+                        raise RuntimeError("Repository readiness "
+                            "timeout exceeded.")
+
         def _api_install(self, api_obj, pkg_list, **kwargs):
                 self.debug("install %s" % " ".join(pkg_list))
                 api_obj.plan_install(pkg_list, **kwargs)
@@ -1776,15 +1849,11 @@
                         i = n + 1
                         testdir = os.path.join(self.test_root)
 
-                        repodir = os.path.join(testdir,
-                            "repo_contents%d" % i)
-
-                        for dir in (testdir, repodir):
-                                try:
-                                        os.makedirs(dir, 0755)
-                                except OSError, e:
-                                        if e.errno != errno.EEXIST:
-                                                raise e
+                        try:
+                                os.makedirs(testdir, 0755)
+                        except OSError, e:
+                                if e.errno != errno.EEXIST:
+                                        raise e
 
                         depot_logfile = os.path.join(testdir,
                             "depot_logfile%d" % i)
@@ -1793,6 +1862,7 @@
 
                         # We pick an arbitrary base port.  This could be more
                         # automated in the future.
+                        repodir = os.path.join(testdir, "repo_contents%d" % i)
                         self.dcs[i] = self.prep_depot(12000 + i, repodir,
                             depot_logfile, debug_features=debug_features,
                             properties=props, start=start_depots)
@@ -1965,9 +2035,9 @@
                         self.debugcmd(cmdline)
 
                         # Run the command to actually create a good image
-                        p = subprocess.Popen(cmdline, shell = True,
-                                             stdout = subprocess.PIPE,
-                                             stderr = subprocess.STDOUT)
+                        p = subprocess.Popen(cmdline, shell=True,
+                                             stdout=subprocess.PIPE,
+                                             stderr=subprocess.STDOUT)
                         output = p.stdout.read()
                         retcode = p.wait()
                         self.debugresult(retcode, 0, output)
--- a/src/util/distro-import/importer.py	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/util/distro-import/importer.py	Thu Aug 19 23:33:49 2010 -0700
@@ -57,7 +57,7 @@
 from pkg.misc import emsg
 from pkg.portable import PD_LOCAL_PATH, PD_PROTO_DIR, PD_PROTO_DIR_LIST
 
-CLIENT_API_VERSION = 40
+CLIENT_API_VERSION = 42
 PKG_CLIENT_NAME = "importer.py"
 pkg.client.global_settings.client_name = PKG_CLIENT_NAME
 
@@ -575,8 +575,8 @@
         assert len(svr4_traversal_dict) == len(svr4_traversal_list)
 
         t = trans.Transaction(def_repo, create_repo=file_repo,
-            refresh_index=False, pkg_name=pkg.fmristr(), noexecute=nopublish,
-            xport=xport, pub=def_pub)
+            pkg_name=pkg.fmristr(), noexecute=nopublish, xport=xport,
+            pub=def_pub)
         transaction_id = t.open()
 
         # publish easy actions
@@ -714,7 +714,7 @@
         for a in depend_actions:
                 publish_action(t, pkg, a)
 
-        pkg_fmri, pkg_state = t.close(refresh_index=False, add_to_catalog=not file_repo)
+        pkg_fmri, pkg_state = t.close(add_to_catalog=not file_repo)
         print "%s: %s\n" % (pkg_fmri, pkg_state)
 
 def search_dicts(path):
@@ -1539,7 +1539,7 @@
         xport_cfg.incoming_download_dir = incoming_dir
 
         def_pub = transport.setup_publisher(def_repo, "default", xport,
-            xport_cfg)
+            xport_cfg, remote_prefix=True)
 
         print "Seeding local SMF manifest database from %s" % def_repo
 
--- a/src/web/en/base.shtml	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/web/en/base.shtml	Thu Aug 19 23:33:49 2010 -0700
@@ -27,7 +27,7 @@
 <%namespace name="shared" file="../shared.shtml" inheritable="True"/>\
 <%page args="g_vars"/>\
 <%
-        CLIENT_API_VERSION = 10
+        CLIENT_API_VERSION = 11
         base = g_vars["base"]
         catalog = api.CatalogInterface(CLIENT_API_VERSION, base)
         config = api.ConfigInterface(CLIENT_API_VERSION, base)
@@ -37,7 +37,12 @@
         g_vars["request"] = request
 
         rpath = request.path_info.strip("/")
-        g_vars["web_config"] = { "locale-region": rpath.split("/")[0] }
+        comps = rpath.split("/")
+        pub = g_vars["pub"]
+        if pub and comps[0] == pub:
+                # Ignore publisher component of path.
+                comps.pop(0)
+        g_vars["web_config"] = { "locale-region": comps[0] }
 %>\
 <%include file="../config.shtml" args="g_vars=g_vars"/>\
 <%def name="global_menu_items(g_vars)"><%
--- a/src/web/en/catalog.shtml	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/web/en/catalog.shtml	Thu Aug 19 23:33:49 2010 -0700
@@ -39,7 +39,11 @@
 <div id="yui-main">
         <div class="yui-b">
 % if config.mirror:
-                <p>Information about packages is not available when the server is operating in mirror mode.</p>
+                <p>Information about packages is not available when the server
+ is operating in mirror mode.</p>
+% elif not request.publisher:
+                <p>This package repository is empty or no default publisher has
+ been set.</p>
 % else:
 <%
         versions = self.shared.get_releases(g_vars)
--- a/src/web/en/index.shtml	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/web/en/index.shtml	Thu Aug 19 23:33:49 2010 -0700
@@ -29,7 +29,16 @@
         request = g_vars["request"]
 %>\
 <%def name="page_title(g_vars)"><%
-        return self.shared.rcval(g_vars, "repository", "name")
+        req = g_vars["request"]
+        pub = req.publisher
+        rname = ""
+        if pub:
+                repo = pub.selected_repository
+                if repo and repo.name:
+                        rname = repo.name
+                else:
+                        rname = "package repository"
+        return rname
 %></%def>\
 <%def name="page_crumbs(g_vars)"><%
         # None at top level.
@@ -41,7 +50,13 @@
 <div id="yui-main">
         <div class="yui-b">
 <%
-        repo_desc = self.shared.rcval(g_vars, "repository", "description")
+        req = g_vars["request"]
+        pub = req.publisher
+        repo_desc = ""
+        if pub:
+                repo = pub.selected_repository
+                if repo and repo.description:
+                        repo_desc = repo.description
 %>
 % if repo_desc:
                 <h2 class="section-title">About</h2>
@@ -59,7 +74,7 @@
                         </p>
                 </form>
 % endif
-% if not config.mirror:
+% if not config.mirror and request.publisher:
 <%
         versions = self.shared.get_releases(g_vars)
         selected_version = request.params.get("version", None)
@@ -103,13 +118,17 @@
                 <p>The list of all available packages can be found
 <a href="catalog.shtml">here</a>.</p>
 %       endif
+% elif not request.publisher:
+                <p>This package repository is empty or no default publisher has
+ been set.</p>
 % else:
-                <p>Information about packages is not available when the server is operating in mirror mode.</p>
+                <p>Information about packages is not available when the server
+ is operating in mirror mode.</p>
 % endif
         </div>
 </div>
 <div class="yui-b">
-% if not config.mirror:
+% if not config.mirror and request.publisher:
         <table class="stats" summary="A summary of information about the
  package repository.">
                 <tr class="first">
@@ -121,19 +140,29 @@
                         </th>
                 </tr>
                 <tr>
+                        <td class="label">Publisher</td>
+                        <td class="value">${pub.prefix}</td>
+                </tr>
+                <tr>
                         <td class="label">Packages</td>
                         <td class="value">${catalog.package_count}</td>
                 </tr>
                 <tr>
                         <td class="label">Last Updated</td>
 <%
-        # Note: last_modified() is in local time relative to the timezone where
-        # catalog was last modified.
-        lm = catalog.last_modified
-        if lm:
-                lm = lm.replace(microsecond=0)
+        if catalog.version == 0:
+                # Note: last_modified() is in local time relative to the
+                # timezone where catalog was last modified.
+                lm = catalog.last_modified()
+                if not lm:
+                        lm = "Never"
         else:
-                lm = "Never"
+                # last_modified is a UTC datetime object.
+                lm = catalog.last_modified
+                if lm:
+                        lm = lm.replace(microsecond=0)
+                else:
+                        lm = "Never"
 %>
                         <td class="value">${lm}</td>
                 </tr>
--- a/src/web/en/search.shtml	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/web/en/search.shtml	Thu Aug 19 23:33:49 2010 -0700
@@ -112,9 +112,12 @@
         except qp.QueryException, e:
                 results = None
                 query_error = e.html()
+        except Exception, e:
+                results = None
+                query_error = urllib.quote(str(e))
 
         # Before showing the results, the type of results being shown has to be
-        # determined since the user might have overriden the return_type
+        # determined since the user might have overridden the return_type
         # selection above using query syntax.  To do that, the first result will
         # have to be checked for the real return type.
         if results:
--- a/src/web/en/stats.shtml	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/web/en/stats.shtml	Thu Aug 19 23:33:49 2010 -0700
@@ -27,13 +27,14 @@
 <%
         catalog = g_vars["catalog"]
         config = g_vars["config"]
+        request = g_vars["request"]
 %>\
 <%def name="page_title(g_vars)"><%
         return "Depot Statistics"
 %></%def>\
 <div id="yui-main" class="yui-b">
         <div class="yui-gb">
-% if not config.mirror:
+% if not config.mirror and request.publisher:
                 <div class="yui-u first">
                 <table class="stats" summary="A summary of high-level statistics
  about the package repository's catalog.">
@@ -103,10 +104,6 @@
                                 <td class="label">manifest</td>
                                 <td class="value">${config.manifest_requests}</td>
                         </tr>
-                        <tr>
-                                <td class="label">rename</td>
-                                <td class="value">${config.rename_requests}</td>
-                        </tr>
 % endif
                 </table>
                 </div>
--- a/src/web/index.shtml	Wed Aug 18 14:52:59 2010 -0700
+++ b/src/web/index.shtml	Thu Aug 19 23:33:49 2010 -0700
@@ -26,7 +26,7 @@
         import pkg.server.api as api
         import pkg.server.api_errors as api_errors
 
-        CLIENT_API_VERSION = 10
+        CLIENT_API_VERSION = 11
 %>\
 <%page args="g_vars"/>\
 <%
@@ -52,6 +52,11 @@
         # Third, determine if we need to redirect the user to a particular
         # page.
         rpath = request.path_info.strip("/")
+        pub = g_vars["pub"]
+        if pub:
+                # Ignore publisher component of path.
+                rpath = rpath.replace(pub, "").strip("/")
+
         if rpath == "":
                 rpath = "index.shtml"